In [1]:
import pandas as pd
import numpy as np
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import time
import matplotlib.pyplot as plt
import cv2
import seaborn as sns
sns.set_style('darkgrid')
import shutil
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.model_selection import train_test_split
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import Dense, Activation,Dropout,Conv2D, MaxPooling2D,BatchNormalization
from tensorflow.keras.optimizers import Adam, Adamax
from tensorflow.keras.metrics import categorical_crossentropy
from tensorflow.keras import regularizers
from tensorflow.keras.models import Model
from tensorflow.keras import backend as K
from tqdm import tqdm
from sklearn.metrics import f1_score
import sys
if not sys.warnoptions:
    import warnings
    warnings.simplefilter("ignore")
pd.set_option('display.max_columns', None)  
pd.set_option('display.max_rows', None)  
pd.set_option('display.max_colwidth', None)  
print('All modules have been imported')
All modules have been imported

In [2]:
def print_in_color(txt_msg,fore_tupple=(255,255,255),back_tupple=(100,100,100)):
    rf,gf,bf=fore_tupple
    rb,gb,bb=back_tupple
    msg='{0}' + txt_msg
    mat='\33[38;2;' + str(rf) +';' + str(gf) + ';' + str(bf) + ';48;2;' + str(rb) + ';' +str(gb) + ';' + str(bb) +'m' 
    print(msg .format(mat), flush=True)
    print('\33[0m', flush=True) 
    return


msg='Test of default colors'
print_in_color(msg)
Test of default colors

In [3]:
def make_dataframes(train_dir,test_dir, val_dir):
    bad_images=[]
    dirlist=[train_dir, test_dir, val_dir]
    names=['train','test', 'valid']
    zipdir=zip(names, dirlist)
    for name,d in zipdir:
        filepaths=[]
        labels=[]
        classlist=sorted(os.listdir(d) )       
        for klass in classlist:
          
            if 'adenocarcinoma' in klass:
                label='adenocarcinoma'
            elif 'large.cell.carcinoma' in klass:
                label='large.cell'
            elif 'squamous.cell' in klass:
                label='squamous.cell'
            else:
                label='normal'
            classpath=os.path.join(d, klass)           
            flist=sorted(os.listdir(classpath)) 
            desc=f'{name:6s}-{label:25s}'
            for f in tqdm(flist, ncols=130,desc=desc, unit='files', colour='blue'):
                fpath=os.path.join(classpath,f)
                try:
                    img=cv2.imread(fpath)
                    shape=img.shape
                    filepaths.append(fpath)
                    labels.append(label)
                except:
                    print (fpath, ' is an invalid image file')
                    bad_images.append(fpath)
        Fseries=pd.Series(filepaths, name='filepaths')
        Lseries=pd.Series(labels, name='labels')
        df=pd.concat([Fseries, Lseries], axis=1) 
        if name =='valid':
            valid_df=df
        elif name == 'test':
            test_df=df
        else:
            train_df=df    
    classes=sorted(train_df['labels'].unique())
    class_count=len(classes)
    sample_df=train_df.sample(n=50, replace=False)

    ht=0
    wt=0
    count=0
    for i in range(len(sample_df)):
        fpath=sample_df['filepaths'].iloc[i]
        try:
            img=cv2.imread(fpath)
            h=img.shape[0]
            w=img.shape[1]
            wt +=w
            ht +=h
            count +=1
        except:
            pass
    have=int(ht/count)
    wave=int(wt/count)
    aspect_ratio=have/wave
    print('number of classes in processed dataset= ', class_count)    
    counts=list(train_df['labels'].value_counts())    
    print('the maximum files in any class in train_df is ', max(counts), '  the minimum files in any class in train_df is ', min(counts))
    print('train_df length: ', len(train_df), '  test_df length: ', len(test_df), '  valid_df length: ', len(valid_df))  
    print('average image height= ', have, '  average image width= ', wave, ' aspect ratio h/w= ', aspect_ratio) 
    if len(bad_images)>0:
        print_in_color('Below is a list of invalid image files')
        for f in bad_images:
            print (f)
    return train_df, test_df, valid_df, classes, class_count

train_dir = r'/Data/train'
val_dir=r'/Data/valid'
test_dir=r'/Data/test'
train_df, test_df, valid_df, classes, class_count=make_dataframes(train_dir, test_dir, val_dir)
    
train -adenocarcinoma           : 100%|██████████████████████████████████████████████████████| 195/195 [00:02<00:00, 68.54files/s]
train -large.cell               : 100%|██████████████████████████████████████████████████████| 115/115 [00:01<00:00, 83.30files/s]
train -normal                   : 100%|██████████████████████████████████████████████████████| 148/148 [00:02<00:00, 52.34files/s]
train -squamous.cell            : 100%|██████████████████████████████████████████████████████| 155/155 [00:02<00:00, 76.15files/s]
test  -adenocarcinoma           : 100%|██████████████████████████████████████████████████████| 120/120 [00:01<00:00, 60.53files/s]
test  -large.cell               : 100%|████████████████████████████████████████████████████████| 51/51 [00:00<00:00, 53.90files/s]
test  -normal                   : 100%|████████████████████████████████████████████████████████| 54/54 [00:01<00:00, 42.24files/s]
test  -squamous.cell            : 100%|████████████████████████████████████████████████████████| 90/90 [00:01<00:00, 57.81files/s]
valid -adenocarcinoma           : 100%|████████████████████████████████████████████████████████| 23/23 [00:00<00:00, 57.26files/s]
valid -large.cell               : 100%|████████████████████████████████████████████████████████| 21/21 [00:00<00:00, 54.27files/s]
valid -normal                   : 100%|████████████████████████████████████████████████████████| 13/13 [00:00<00:00, 45.78files/s]
valid -squamous.cell            : 100%|████████████████████████████████████████████████████████| 15/15 [00:00<00:00, 83.93files/s]
number of classes in processed dataset=  4
the maximum files in any class in train_df is  195   the minimum files in any class in train_df is  115
train_df length:  613   test_df length:  315   valid_df length:  72
average image height=  309   average image width=  452  aspect ratio h/w=  0.6836283185840708
In [5]:
def balance(df, n,column, working_dir, img_size):
    df=df.copy()
    print('Initial length of dataframe is ', len(df))
    aug_dir=os.path.join(working_dir, 'aug')
    if os.path.isdir(aug_dir):
        shutil.rmtree(aug_dir)
    os.mkdir(aug_dir)        
    for label in df[column].unique():    
        dir_path=os.path.join(aug_dir,label)    
        os.mkdir(dir_path) 
   
    total=0
    gen=ImageDataGenerator(horizontal_flip=True,  rotation_range=20, width_shift_range=.2,
                                  height_shift_range=.2, zoom_range=.2)
    groups=df.groupby(column)
    for label in df[column].unique():                 
        group=groups.get_group(label)  
        sample_count=len(group)   
        if sample_count< n: 
            aug_img_count=0
            delta=n - sample_count  
            target_dir=os.path.join(aug_dir, label)  
            msg='{0:40s} for class {1:^30s} creating {2:^5s} augmented images'.format(' ', label, str(delta))
            print(msg, '\r', end='') 
            aug_gen=gen.flow_from_dataframe( group,  x_col='filepaths', y_col=None, target_size=img_size,
                                            class_mode=None, batch_size=1, shuffle=False, 
                                            save_to_dir=target_dir, save_prefix='aug-', color_mode='rgb',
                                            save_format='jpg')
            while aug_img_count<delta:
                images=next(aug_gen)            
                aug_img_count += len(images)
            total +=aug_img_count
    print('Total Augmented images created= ', total)

    aug_fpaths=[]
    aug_labels=[]
    classlist=sorted(os.listdir(aug_dir))
    for klass in classlist:
        classpath=os.path.join(aug_dir, klass)     
        flist=sorted(os.listdir(classpath))    
        for f in flist:        
            fpath=os.path.join(classpath,f)         
            aug_fpaths.append(fpath)
            aug_labels.append(klass)
    Fseries=pd.Series(aug_fpaths, name='filepaths')
    Lseries=pd.Series(aug_labels, name='labels')   
    aug_df=pd.concat([Fseries, Lseries], axis=1)         
    df=pd.concat([df,aug_df], axis=0).reset_index(drop=True)
    print('Length of augmented dataframe is now ', len(df))
    return df 

n=400
working_dir=r'./'
column='labels'
img_size=(300,440)
train_df=balance(train_df, n, column, working_dir, img_size)
Initial length of dataframe is  613
Found 195 validated image filenames.     for class         adenocarcinoma         creating  205  augmented images 
Found 115 validated image filenames.     for class           large.cell           creating  285  augmented images 
Found 148 validated image filenames.     for class             normal             creating  252  augmented images 
Found 155 validated image filenames.     for class         squamous.cell          creating  245  augmented images 
Total Augmented images created=  987
Length of augmented dataframe is now  1600
In [6]:
def make_gens(batch_size, ycol, train_df, test_df, valid_df, img_size):
    trgen=ImageDataGenerator(horizontal_flip=True)    
    t_and_v_gen=ImageDataGenerator()
    msg='{0:70s} for train generator'.format(' ')
    print(msg, '\r', end='') 
    train_gen=trgen.flow_from_dataframe(train_df, x_col='filepaths', y_col=ycol, target_size=img_size,
                                       class_mode='categorical', color_mode='rgb', shuffle=True, batch_size=batch_size)
    msg='{0:70s} for valid generator'.format(' ')
    print(msg, '\r', end='') 
    valid_gen=t_and_v_gen.flow_from_dataframe(valid_df, x_col='filepaths', y_col=ycol, target_size=img_size,
                                       class_mode='categorical', color_mode='rgb', shuffle=False, batch_size=batch_size)
   
    length=len(test_df)
    test_batch_size=sorted([int(length/n) for n in range(1,length+1) if length % n ==0 and length/n<=80],reverse=True)[0]  
    test_steps=int(length/test_batch_size)    
    msg='{0:70s} for test generator'.format(' ')
    print(msg, '\r', end='') 
    test_gen=t_and_v_gen.flow_from_dataframe(test_df, x_col='filepaths', y_col=ycol, target_size=img_size,
                                       class_mode='categorical', color_mode='rgb', shuffle=False, batch_size=test_batch_size)
    
    classes=list(train_gen.class_indices.keys())
    class_indices=list(train_gen.class_indices.values())
    class_count=len(classes)
    labels=test_gen.labels
    print ( 'test batch size: ' ,test_batch_size, '  test steps: ', test_steps, ' number of classes : ', class_count)
    return train_gen, test_gen, valid_gen, test_steps
In [7]:
batch_size=20
ycol='labels'
train_gen, test_gen, valid_gen, test_steps= make_gens(batch_size, ycol, train_df, test_df, valid_df, img_size)
Found 1600 validated image filenames belonging to 4 classes.           for train generator 
Found 72 validated image filenames belonging to 4 classes.             for valid generator 
Found 315 validated image filenames belonging to 4 classes.            for test generator 
test batch size:  63   test steps:  5  number of classes :  4
In [8]:
def show_image_samples(gen ):
    t_dict=gen.class_indices
    classes=list(t_dict.keys())    
    images,labels=next(gen) 
    plt.figure(figsize=(25, 25))
    length=len(labels)
    if length<25:  
        r=length
    else:
        r=25
    for i in range(r):        
        plt.subplot(5, 5, i + 1)
        image=images[i] /255       
        plt.imshow(image)
        index=np.argmax(labels[i])
        class_name=classes[index]
        plt.title(class_name, color='blue', fontsize=18)
        plt.axis('off')
    plt.show()
    
show_image_samples(train_gen )
In [9]:
def make_model(img_size, lr, mod_num=3):  
    img_shape=(img_size[0], img_size[1], 3)
    if mod_num == 3:
        base_model=tf.keras.applications.efficientnet.EfficientNetB3(include_top=False, weights="imagenet",input_shape=img_shape, pooling='max') 
        msg='Created EfficientNet B3 model'

   
    base_model.trainable=True
    x=base_model.output
    x=BatchNormalization(axis=-1, momentum=0.99, epsilon=0.001 )(x)
    x = Dense(256, kernel_regularizer = regularizers.l2(l = 0.016),activity_regularizer=regularizers.l1(0.006),
                    bias_regularizer=regularizers.l1(0.006) ,activation='relu')(x)
    x=Dropout(rate=.4, seed=123)(x)       
    output=Dense(class_count, activation='softmax')(x)
    model=Model(inputs=base_model.input, outputs=output)
    model.compile(Adamax(learning_rate=lr), loss='categorical_crossentropy', metrics=['accuracy']) 
    msg=msg + f' with initial learning rate set to {lr}'
    print_in_color(msg)
    return model

lr=.001
model=make_model(img_size, lr) 
Downloading data from https://storage.googleapis.com/keras-applications/efficientnetb3_notop.h5
43941888/43941136 [==============================] - 1s 0us/step
43950080/43941136 [==============================] - 1s 0us/step
Created EfficientNet B3 model with initial learning rate set to 0.001

In [10]:
class LR_ASK(keras.callbacks.Callback):
    def __init__ (self, model, epochs,  ask_epoch, dwell=True, factor=.4): 
        super(LR_ASK, self).__init__()
        self.model=model               
        self.ask_epoch=ask_epoch
        self.epochs=epochs
        self.ask=True 
        self.lowest_vloss=np.inf
        self.lowest_aloss=np.inf
        self.best_weights=self.model.get_weights() 
        self.best_epoch=1
        self.plist=[]
        self.alist=[]
        self.dwell= dwell
        self.factor=factor
        
    def get_list(self): 
        return self.plist, self.alist
    def on_train_begin(self, logs=None): 
        if self.ask_epoch == 0: 
            print('you set ask_epoch = 0, ask_epoch will be set to 1', flush=True)
            self.ask_epoch=1
        if self.ask_epoch >= self.epochs: 
            print('ask_epoch >= epochs, will train for ', epochs, ' epochs', flush=True)
            self.ask=False 
        if self.epochs == 1:
            self.ask=False
        else:
            msg =f'Training will proceed until epoch {ask_epoch} then you will be asked to' 
            print_in_color(msg )
            msg='enter H to halt training or enter an integer for how many more epochs to run then be asked again'
            print_in_color(msg)
            if self.dwell:
                msg='learning rate will be automatically adjusted during training'
                print_in_color(msg, (0,255,0))
        self.start_time= time.time()
       
    def on_train_end(self, logs=None):  
        msg=f'loading model with weights from epoch {self.best_epoch}'
        print_in_color(msg, (0,255,255))
        self.model.set_weights(self.best_weights) 
        tr_duration=time.time() - self.start_time           
        hours = tr_duration // 3600
        minutes = (tr_duration - (hours * 3600)) // 60
        seconds = tr_duration - ((hours * 3600) + (minutes * 60))
        msg = f'training elapsed time was {str(hours)} hours, {minutes:4.1f} minutes, {seconds:4.2f} seconds)'
        print_in_color (msg) 
        
    def on_epoch_end(self, epoch, logs=None):  
        vloss=logs.get('val_loss')  
        aloss=logs.get('loss')
        if epoch >0:
            deltav = self.lowest_vloss- vloss 
            pimprov=(deltav/self.lowest_vloss) * 100 
            self.plist.append(pimprov)
            deltaa=self.lowest_aloss-aloss
            aimprov=(deltaa/self.lowest_aloss) * 100
            self.alist.append(aimprov)
        else:
            pimprov=0.0 
            aimprov=0.0
        if vloss< self.lowest_vloss:
            self.lowest_vloss=vloss
            self.best_weights=self.model.get_weights() 
            self.best_epoch=epoch + 1            
            msg=f'\n validation loss of {vloss:7.4f} is {pimprov:7.4f} % below lowest loss, saving weights from epoch {str(epoch + 1):3s} as best weights'
            print_in_color(msg, (0,255,0))
        else:
            pimprov=abs(pimprov)
            msg=f'\n validation loss of {vloss:7.4f} is {pimprov:7.4f} % above lowest loss of {self.lowest_vloss:7.4f} keeping weights from epoch {str(self.best_epoch)} as best weights'
            print_in_color(msg, (255,255,0)) 
            if self.dwell:
                lr=float(tf.keras.backend.get_value(self.model.optimizer.lr)) 
                new_lr=lr * self.factor
                msg=f'learning rate was automatically adjusted from {lr:8.6f} to {new_lr:8.6f}, model weights set to best weights'
                print_in_color(msg) 
                tf.keras.backend.set_value(self.model.optimizer.lr, new_lr) 
                self.model.set_weights(self.best_weights)      
                
        if aloss< self.lowest_aloss:
            self.lowest_aloss=aloss        
        if self.ask: 
            if epoch + 1 ==self.ask_epoch: 
                msg='\n Enter H to end training or  an integer for the number of additional epochs to run then ask again'
                print_in_color(msg) 
                ans=input()
                
                if ans == 'H' or ans =='h' or ans == '0': 
                    msg=f'you entered {ans},  Training halted on epoch {epoch+1} due to user input\n'
                    print_in_color(msg)
                    self.model.stop_training = True 
                else: 
                    self.ask_epoch += int(ans)
                    if self.ask_epoch > self.epochs:
                        print('\nYou specified maximum epochs of as ', self.epochs, ' cannot train for ', self.ask_epoch, flush =True)
                    else:
                        msg=f'you entered {ans} Training will continue to epoch {self.ask_epoch}'
                        print_in_color(msg) 
                        if self.dwell==False:
                            lr=float(tf.keras.backend.get_value(self.model.optimizer.lr)) 
                            msg=f'current LR is  {lr:8.6f}  hit enter to keep  this LR or enter a new LR'
                            print_in_color(msg) 
                            ans=input(' ')
                            if ans =='':
                                msg=f'keeping current LR of {lr:7.5f}'
                                print_in_color(msg) 
                            else:
                                new_lr=float(ans)
                                tf.keras.backend.set_value(self.model.optimizer.lr, new_lr) 
                                msg=f' changing LR to {ans}'
                                print_in_color(msg) 

Instantiate custom callback

In [11]:
epochs=40
ask_epoch=25
ask=LR_ASK(model, epochs,  ask_epoch)
callbacks=[ask]
In [12]:
history=model.fit(x=train_gen,   epochs=epochs, verbose=1, callbacks=callbacks,  validation_data=valid_gen,
               validation_steps=None,  shuffle=False,  initial_epoch=0)
Training will proceed until epoch 25 then you will be asked to

enter H to halt training or enter an integer for how many more epochs to run then be asked again

learning rate will be automatically adjusted during training

Epoch 1/40
80/80 [==============================] - 114s 1s/step - loss: 8.0958 - accuracy: 0.6731 - val_loss: 7.7110 - val_accuracy: 0.6250

 validation loss of  7.7110 is  0.0000 % below lowest loss, saving weights from epoch 1   as best weights

Epoch 2/40
80/80 [==============================] - 81s 1s/step - loss: 6.1459 - accuracy: 0.9100 - val_loss: 5.9955 - val_accuracy: 0.7361

 validation loss of  5.9955 is 22.2469 % below lowest loss, saving weights from epoch 2   as best weights

Epoch 3/40
80/80 [==============================] - 81s 1s/step - loss: 5.0317 - accuracy: 0.9681 - val_loss: 4.8470 - val_accuracy: 0.8333

 validation loss of  4.8470 is 19.1560 % below lowest loss, saving weights from epoch 3   as best weights

Epoch 4/40
80/80 [==============================] - 81s 1s/step - loss: 4.1968 - accuracy: 0.9756 - val_loss: 4.1240 - val_accuracy: 0.8611

 validation loss of  4.1240 is 14.9170 % below lowest loss, saving weights from epoch 4   as best weights

Epoch 5/40
80/80 [==============================] - 81s 1s/step - loss: 3.4972 - accuracy: 0.9869 - val_loss: 3.3428 - val_accuracy: 0.9167

 validation loss of  3.3428 is 18.9429 % below lowest loss, saving weights from epoch 5   as best weights

Epoch 6/40
80/80 [==============================] - 81s 1s/step - loss: 2.9144 - accuracy: 0.9925 - val_loss: 2.7868 - val_accuracy: 0.9167

 validation loss of  2.7868 is 16.6329 % below lowest loss, saving weights from epoch 6   as best weights

Epoch 7/40
80/80 [==============================] - 81s 1s/step - loss: 2.4250 - accuracy: 0.9950 - val_loss: 2.3528 - val_accuracy: 0.9167

 validation loss of  2.3528 is 15.5721 % below lowest loss, saving weights from epoch 7   as best weights

Epoch 8/40
80/80 [==============================] - 81s 1s/step - loss: 2.0494 - accuracy: 0.9881 - val_loss: 2.0320 - val_accuracy: 0.8889

 validation loss of  2.0320 is 13.6362 % below lowest loss, saving weights from epoch 8   as best weights

Epoch 9/40
80/80 [==============================] - 81s 1s/step - loss: 1.6923 - accuracy: 0.9962 - val_loss: 1.6888 - val_accuracy: 0.9167

 validation loss of  1.6888 is 16.8894 % below lowest loss, saving weights from epoch 9   as best weights

Epoch 10/40
80/80 [==============================] - 81s 1s/step - loss: 1.4129 - accuracy: 0.9944 - val_loss: 1.5068 - val_accuracy: 0.9167

 validation loss of  1.5068 is 10.7743 % below lowest loss, saving weights from epoch 10  as best weights

Epoch 11/40
80/80 [==============================] - 81s 1s/step - loss: 1.1631 - accuracy: 0.9981 - val_loss: 1.2740 - val_accuracy: 0.9028

 validation loss of  1.2740 is 15.4555 % below lowest loss, saving weights from epoch 11  as best weights

Epoch 12/40
80/80 [==============================] - 81s 1s/step - loss: 0.9870 - accuracy: 0.9912 - val_loss: 1.1228 - val_accuracy: 0.8750

 validation loss of  1.1228 is 11.8663 % below lowest loss, saving weights from epoch 12  as best weights

Epoch 13/40
80/80 [==============================] - 81s 1s/step - loss: 0.8356 - accuracy: 0.9900 - val_loss: 0.9869 - val_accuracy: 0.8889

 validation loss of  0.9869 is 12.1017 % below lowest loss, saving weights from epoch 13  as best weights

Epoch 14/40
80/80 [==============================] - 81s 1s/step - loss: 0.7000 - accuracy: 0.9937 - val_loss: 0.7914 - val_accuracy: 0.9444

 validation loss of  0.7914 is 19.8101 % below lowest loss, saving weights from epoch 14  as best weights

Epoch 15/40
80/80 [==============================] - 81s 1s/step - loss: 0.5910 - accuracy: 0.9919 - val_loss: 0.6854 - val_accuracy: 0.9306

 validation loss of  0.6854 is 13.3885 % below lowest loss, saving weights from epoch 15  as best weights

Epoch 16/40
80/80 [==============================] - 81s 1s/step - loss: 0.4980 - accuracy: 0.9962 - val_loss: 0.6752 - val_accuracy: 0.9028

 validation loss of  0.6752 is  1.5000 % below lowest loss, saving weights from epoch 16  as best weights

Epoch 17/40
80/80 [==============================] - 81s 1s/step - loss: 0.4210 - accuracy: 0.9987 - val_loss: 0.5294 - val_accuracy: 0.9444

 validation loss of  0.5294 is 21.5862 % below lowest loss, saving weights from epoch 17  as best weights

Epoch 18/40
80/80 [==============================] - 81s 1s/step - loss: 0.3726 - accuracy: 0.9937 - val_loss: 0.7558 - val_accuracy: 0.8056

 validation loss of  0.7558 is 42.7534 % above lowest loss of  0.5294 keeping weights from epoch 17 as best weights

learning rate was automatically adjusted from 0.001000 to 0.000400, model weights set to best weights

Epoch 19/40
80/80 [==============================] - 81s 1s/step - loss: 0.3713 - accuracy: 0.9975 - val_loss: 0.4983 - val_accuracy: 0.9444

 validation loss of  0.4983 is  5.8744 % below lowest loss, saving weights from epoch 19  as best weights

Epoch 20/40
80/80 [==============================] - 81s 1s/step - loss: 0.3574 - accuracy: 0.9925 - val_loss: 0.4852 - val_accuracy: 0.9306

 validation loss of  0.4852 is  2.6230 % below lowest loss, saving weights from epoch 20  as best weights

Epoch 21/40
80/80 [==============================] - 81s 1s/step - loss: 0.3324 - accuracy: 0.9962 - val_loss: 0.4654 - val_accuracy: 0.9444

 validation loss of  0.4654 is  4.0829 % below lowest loss, saving weights from epoch 21  as best weights

Epoch 22/40
80/80 [==============================] - 81s 1s/step - loss: 0.3073 - accuracy: 0.9962 - val_loss: 0.4985 - val_accuracy: 0.9167

 validation loss of  0.4985 is  7.1064 % above lowest loss of  0.4654 keeping weights from epoch 21 as best weights

learning rate was automatically adjusted from 0.000400 to 0.000160, model weights set to best weights

Epoch 23/40
80/80 [==============================] - 80s 998ms/step - loss: 0.3106 - accuracy: 0.9969 - val_loss: 0.4335 - val_accuracy: 0.9444

 validation loss of  0.4335 is  6.8587 % below lowest loss, saving weights from epoch 23  as best weights

Epoch 24/40
80/80 [==============================] - 81s 1s/step - loss: 0.3046 - accuracy: 0.9987 - val_loss: 0.4625 - val_accuracy: 0.9444

 validation loss of  0.4625 is  6.6804 % above lowest loss of  0.4335 keeping weights from epoch 23 as best weights

learning rate was automatically adjusted from 0.000160 to 0.000064, model weights set to best weights

Epoch 25/40
80/80 [==============================] - 82s 1s/step - loss: 0.3062 - accuracy: 0.9969 - val_loss: 0.4492 - val_accuracy: 0.9444

 validation loss of  0.4492 is  3.6199 % above lowest loss of  0.4335 keeping weights from epoch 23 as best weights

learning rate was automatically adjusted from 0.000064 to 0.000026, model weights set to best weights


 Enter H to end training or  an integer for the number of additional epochs to run then ask again

you entered H,  Training halted on epoch 25 due to user input


loading model with weights from epoch 23

training elapsed time was 0.0 hours, 54.0 minutes, 48.37 seconds)

Define a function to plot the training data

In [14]:
def tr_plot(tr_data, start_epoch):
    
    tacc=tr_data.history['accuracy']
    tloss=tr_data.history['loss']
    vacc=tr_data.history['val_accuracy']
    vloss=tr_data.history['val_loss']
    Epoch_count=len(tacc)+ start_epoch
    Epochs=[]
    for i in range (start_epoch ,Epoch_count):
        Epochs.append(i+1)   
    index_loss=np.argmin(vloss)
    val_lowest=vloss[index_loss]
    index_acc=np.argmax(vacc)
    acc_highest=vacc[index_acc]
    plt.style.use('fivethirtyeight')    
    sc_label='best epoch= '+ str(index_loss+1 +start_epoch)
    vc_label='best epoch= '+ str(index_acc + 1+ start_epoch)
    fig,axes=plt.subplots(nrows=1, ncols=2, figsize=(25,10))
    axes[0].plot(Epochs,tloss, 'r', label='Training loss')
    axes[0].plot(Epochs,vloss,'g',label='Validation loss' )
    axes[0].scatter(index_loss+1 +start_epoch,val_lowest, s=150, c= 'blue', label=sc_label)
    axes[0].scatter(Epochs, tloss, s=100, c='red')    
    axes[0].set_title('Training and Validation Loss')
    axes[0].set_xlabel('Epochs', fontsize=18)
    axes[0].set_ylabel('Loss', fontsize=18)
    axes[0].legend()
    axes[1].plot (Epochs,tacc,'r',label= 'Training Accuracy')
    axes[1].scatter(Epochs, tacc, s=100, c='red')
    axes[1].plot (Epochs,vacc,'g',label= 'Validation Accuracy')
    axes[1].scatter(index_acc+1 +start_epoch,acc_highest, s=150, c= 'blue', label=vc_label)
    axes[1].set_title('Training and Validation Accuracy')
    axes[1].set_xlabel('Epochs', fontsize=18)
    axes[1].set_ylabel('Accuracy', fontsize=18)
    axes[1].legend()
    plt.tight_layout    
    plt.show()
    
    
tr_plot(history,0)
In [15]:
def predictor(test_gen):    
    y_pred= []
    error_list=[]
    error_pred_list = []
    y_true=test_gen.labels
    classes=list(test_gen.class_indices.keys())
    class_count=len(classes)
    errors=0
    preds=model.predict(test_gen, verbose=1)
    tests=len(preds)    
    for i, p in enumerate(preds):        
        pred_index=np.argmax(p)         
        true_index=test_gen.labels[i]        
        if pred_index != true_index:                                           
            errors=errors + 1
            file=test_gen.filenames[i]
            error_list.append(file)
            error_class=classes[pred_index]
            error_pred_list.append(error_class)
        y_pred.append(pred_index)
            
    acc=( 1-errors/tests) * 100
    msg=f'there were {errors} errors in {tests} tests for an accuracy of {acc:6.2f}'
    print_in_color(msg, (0,255,255), (100,100,100))
    ypred=np.array(y_pred)
    ytrue=np.array(y_true)
    f1score=f1_score(ytrue, ypred, average='weighted')* 100
    if class_count <=30:
        cm = confusion_matrix(ytrue, ypred )
       
        plt.figure(figsize=(12, 8))
        sns.heatmap(cm, annot=True, vmin=0, fmt='g', cmap='Blues', cbar=False)       
        plt.xticks(np.arange(class_count)+.5, classes, rotation=90)
        plt.yticks(np.arange(class_count)+.5, classes, rotation=0)
        plt.xlabel("Predicted")
        plt.ylabel("Actual")
        plt.title("Confusion Matrix")
        plt.show()
    clr = classification_report(y_true, y_pred, target_names=classes, digits= 4)
    print("Classification Report:\n----------------------\n", clr)
    return errors, tests, error_list, error_pred_list, f1score

errors, tests, error_list, error_pred_list, f1score =predictor(test_gen)
5/5 [==============================] - 8s 526ms/step
there were 18 errors in 315 tests for an accuracy of  94.29

Classification Report:
----------------------
                 precision    recall  f1-score   support

adenocarcinoma     0.9048    0.9500    0.9268       120
    large.cell     0.8947    1.0000    0.9444        51
        normal     1.0000    0.9815    0.9907        54
 squamous.cell     1.0000    0.8778    0.9349        90

      accuracy                         0.9429       315
     macro avg     0.9499    0.9523    0.9492       315
  weighted avg     0.9467    0.9429    0.9429       315

In [21]:
def print_errors(error_list):
    if len(error_list) == 0:
        print_in_color('There were no errors in predicting the test set')
    else:
        if len(error_list)<50:
            print ('Below is a list of test files that were miss classified \n')
            print ('{0:^30s}{1:^30s}'.format('Test File', ' Predicted as'))            
            for i in range(len(error_list)):
                fpath=error_list[i]        
                split=fpath.split('/')        
                f=split[4]+ '-' + split[5]
                print(f'{f:^30s}{error_pred_list[i]:^30s}')
In [17]:
print_errors(error_list)
In [16]:
def save_model(subject, classes, img_size, f1score, working_dir):
    name=subject + '-' + str(len(classes)) + '-(' + str(img_size[0]) + ' X ' + str(img_size[1]) + ')'
    save_id=f'{name}-{f1score:5.2f}.h5'
    model_save_loc=os.path.join(working_dir, save_id)
    model.save(model_save_loc)
    msg= f'model was saved as {model_save_loc}'
    print_in_color(msg, (0,255,255), (100,100,100)) 
In [17]:
working_dir=r'./'
subject='chest scans'
save_model(subject, classes, img_size, f1score, working_dir)
model was saved as ./chest scans-4-(300 X 440)-94.29.h5

In [18]:
model.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            [(None, 300, 440, 3) 0                                            
__________________________________________________________________________________________________
rescaling (Rescaling)           (None, 300, 440, 3)  0           input_1[0][0]                    
__________________________________________________________________________________________________
normalization (Normalization)   (None, 300, 440, 3)  7           rescaling[0][0]                  
__________________________________________________________________________________________________
stem_conv_pad (ZeroPadding2D)   (None, 301, 441, 3)  0           normalization[0][0]              
__________________________________________________________________________________________________
stem_conv (Conv2D)              (None, 150, 220, 40) 1080        stem_conv_pad[0][0]              
__________________________________________________________________________________________________
stem_bn (BatchNormalization)    (None, 150, 220, 40) 160         stem_conv[0][0]                  
__________________________________________________________________________________________________
stem_activation (Activation)    (None, 150, 220, 40) 0           stem_bn[0][0]                    
__________________________________________________________________________________________________
block1a_dwconv (DepthwiseConv2D (None, 150, 220, 40) 360         stem_activation[0][0]            
__________________________________________________________________________________________________
block1a_bn (BatchNormalization) (None, 150, 220, 40) 160         block1a_dwconv[0][0]             
__________________________________________________________________________________________________
block1a_activation (Activation) (None, 150, 220, 40) 0           block1a_bn[0][0]                 
__________________________________________________________________________________________________
block1a_se_squeeze (GlobalAvera (None, 40)           0           block1a_activation[0][0]         
__________________________________________________________________________________________________
block1a_se_reshape (Reshape)    (None, 1, 1, 40)     0           block1a_se_squeeze[0][0]         
__________________________________________________________________________________________________
block1a_se_reduce (Conv2D)      (None, 1, 1, 10)     410         block1a_se_reshape[0][0]         
__________________________________________________________________________________________________
block1a_se_expand (Conv2D)      (None, 1, 1, 40)     440         block1a_se_reduce[0][0]          
__________________________________________________________________________________________________
block1a_se_excite (Multiply)    (None, 150, 220, 40) 0           block1a_activation[0][0]         
                                                                 block1a_se_expand[0][0]          
__________________________________________________________________________________________________
block1a_project_conv (Conv2D)   (None, 150, 220, 24) 960         block1a_se_excite[0][0]          
__________________________________________________________________________________________________
block1a_project_bn (BatchNormal (None, 150, 220, 24) 96          block1a_project_conv[0][0]       
__________________________________________________________________________________________________
block1b_dwconv (DepthwiseConv2D (None, 150, 220, 24) 216         block1a_project_bn[0][0]         
__________________________________________________________________________________________________
block1b_bn (BatchNormalization) (None, 150, 220, 24) 96          block1b_dwconv[0][0]             
__________________________________________________________________________________________________
block1b_activation (Activation) (None, 150, 220, 24) 0           block1b_bn[0][0]                 
__________________________________________________________________________________________________
block1b_se_squeeze (GlobalAvera (None, 24)           0           block1b_activation[0][0]         
__________________________________________________________________________________________________
block1b_se_reshape (Reshape)    (None, 1, 1, 24)     0           block1b_se_squeeze[0][0]         
__________________________________________________________________________________________________
block1b_se_reduce (Conv2D)      (None, 1, 1, 6)      150         block1b_se_reshape[0][0]         
__________________________________________________________________________________________________
block1b_se_expand (Conv2D)      (None, 1, 1, 24)     168         block1b_se_reduce[0][0]          
__________________________________________________________________________________________________
block1b_se_excite (Multiply)    (None, 150, 220, 24) 0           block1b_activation[0][0]         
                                                                 block1b_se_expand[0][0]          
__________________________________________________________________________________________________
block1b_project_conv (Conv2D)   (None, 150, 220, 24) 576         block1b_se_excite[0][0]          
__________________________________________________________________________________________________
block1b_project_bn (BatchNormal (None, 150, 220, 24) 96          block1b_project_conv[0][0]       
__________________________________________________________________________________________________
block1b_drop (Dropout)          (None, 150, 220, 24) 0           block1b_project_bn[0][0]         
__________________________________________________________________________________________________
block1b_add (Add)               (None, 150, 220, 24) 0           block1b_drop[0][0]               
                                                                 block1a_project_bn[0][0]         
__________________________________________________________________________________________________
block2a_expand_conv (Conv2D)    (None, 150, 220, 144 3456        block1b_add[0][0]                
__________________________________________________________________________________________________
block2a_expand_bn (BatchNormali (None, 150, 220, 144 576         block2a_expand_conv[0][0]        
__________________________________________________________________________________________________
block2a_expand_activation (Acti (None, 150, 220, 144 0           block2a_expand_bn[0][0]          
__________________________________________________________________________________________________
block2a_dwconv_pad (ZeroPadding (None, 151, 221, 144 0           block2a_expand_activation[0][0]  
__________________________________________________________________________________________________
block2a_dwconv (DepthwiseConv2D (None, 75, 110, 144) 1296        block2a_dwconv_pad[0][0]         
__________________________________________________________________________________________________
block2a_bn (BatchNormalization) (None, 75, 110, 144) 576         block2a_dwconv[0][0]             
__________________________________________________________________________________________________
block2a_activation (Activation) (None, 75, 110, 144) 0           block2a_bn[0][0]                 
__________________________________________________________________________________________________
block2a_se_squeeze (GlobalAvera (None, 144)          0           block2a_activation[0][0]         
__________________________________________________________________________________________________
block2a_se_reshape (Reshape)    (None, 1, 1, 144)    0           block2a_se_squeeze[0][0]         
__________________________________________________________________________________________________
block2a_se_reduce (Conv2D)      (None, 1, 1, 6)      870         block2a_se_reshape[0][0]         
__________________________________________________________________________________________________
block2a_se_expand (Conv2D)      (None, 1, 1, 144)    1008        block2a_se_reduce[0][0]          
__________________________________________________________________________________________________
block2a_se_excite (Multiply)    (None, 75, 110, 144) 0           block2a_activation[0][0]         
                                                                 block2a_se_expand[0][0]          
__________________________________________________________________________________________________
block2a_project_conv (Conv2D)   (None, 75, 110, 32)  4608        block2a_se_excite[0][0]          
__________________________________________________________________________________________________
block2a_project_bn (BatchNormal (None, 75, 110, 32)  128         block2a_project_conv[0][0]       
__________________________________________________________________________________________________
block2b_expand_conv (Conv2D)    (None, 75, 110, 192) 6144        block2a_project_bn[0][0]         
__________________________________________________________________________________________________
block2b_expand_bn (BatchNormali (None, 75, 110, 192) 768         block2b_expand_conv[0][0]        
__________________________________________________________________________________________________
block2b_expand_activation (Acti (None, 75, 110, 192) 0           block2b_expand_bn[0][0]          
__________________________________________________________________________________________________
block2b_dwconv (DepthwiseConv2D (None, 75, 110, 192) 1728        block2b_expand_activation[0][0]  
__________________________________________________________________________________________________
block2b_bn (BatchNormalization) (None, 75, 110, 192) 768         block2b_dwconv[0][0]             
__________________________________________________________________________________________________
block2b_activation (Activation) (None, 75, 110, 192) 0           block2b_bn[0][0]                 
__________________________________________________________________________________________________
block2b_se_squeeze (GlobalAvera (None, 192)          0           block2b_activation[0][0]         
__________________________________________________________________________________________________
block2b_se_reshape (Reshape)    (None, 1, 1, 192)    0           block2b_se_squeeze[0][0]         
__________________________________________________________________________________________________
block2b_se_reduce (Conv2D)      (None, 1, 1, 8)      1544        block2b_se_reshape[0][0]         
__________________________________________________________________________________________________
block2b_se_expand (Conv2D)      (None, 1, 1, 192)    1728        block2b_se_reduce[0][0]          
__________________________________________________________________________________________________
block2b_se_excite (Multiply)    (None, 75, 110, 192) 0           block2b_activation[0][0]         
                                                                 block2b_se_expand[0][0]          
__________________________________________________________________________________________________
block2b_project_conv (Conv2D)   (None, 75, 110, 32)  6144        block2b_se_excite[0][0]          
__________________________________________________________________________________________________
block2b_project_bn (BatchNormal (None, 75, 110, 32)  128         block2b_project_conv[0][0]       
__________________________________________________________________________________________________
block2b_drop (Dropout)          (None, 75, 110, 32)  0           block2b_project_bn[0][0]         
__________________________________________________________________________________________________
block2b_add (Add)               (None, 75, 110, 32)  0           block2b_drop[0][0]               
                                                                 block2a_project_bn[0][0]         
__________________________________________________________________________________________________
block2c_expand_conv (Conv2D)    (None, 75, 110, 192) 6144        block2b_add[0][0]                
__________________________________________________________________________________________________
block2c_expand_bn (BatchNormali (None, 75, 110, 192) 768         block2c_expand_conv[0][0]        
__________________________________________________________________________________________________
block2c_expand_activation (Acti (None, 75, 110, 192) 0           block2c_expand_bn[0][0]          
__________________________________________________________________________________________________
block2c_dwconv (DepthwiseConv2D (None, 75, 110, 192) 1728        block2c_expand_activation[0][0]  
__________________________________________________________________________________________________
block2c_bn (BatchNormalization) (None, 75, 110, 192) 768         block2c_dwconv[0][0]             
__________________________________________________________________________________________________
block2c_activation (Activation) (None, 75, 110, 192) 0           block2c_bn[0][0]                 
__________________________________________________________________________________________________
block2c_se_squeeze (GlobalAvera (None, 192)          0           block2c_activation[0][0]         
__________________________________________________________________________________________________
block2c_se_reshape (Reshape)    (None, 1, 1, 192)    0           block2c_se_squeeze[0][0]         
__________________________________________________________________________________________________
block2c_se_reduce (Conv2D)      (None, 1, 1, 8)      1544        block2c_se_reshape[0][0]         
__________________________________________________________________________________________________
block2c_se_expand (Conv2D)      (None, 1, 1, 192)    1728        block2c_se_reduce[0][0]          
__________________________________________________________________________________________________
block2c_se_excite (Multiply)    (None, 75, 110, 192) 0           block2c_activation[0][0]         
                                                                 block2c_se_expand[0][0]          
__________________________________________________________________________________________________
block2c_project_conv (Conv2D)   (None, 75, 110, 32)  6144        block2c_se_excite[0][0]          
__________________________________________________________________________________________________
block2c_project_bn (BatchNormal (None, 75, 110, 32)  128         block2c_project_conv[0][0]       
__________________________________________________________________________________________________
block2c_drop (Dropout)          (None, 75, 110, 32)  0           block2c_project_bn[0][0]         
__________________________________________________________________________________________________
block2c_add (Add)               (None, 75, 110, 32)  0           block2c_drop[0][0]               
                                                                 block2b_add[0][0]                
__________________________________________________________________________________________________
block3a_expand_conv (Conv2D)    (None, 75, 110, 192) 6144        block2c_add[0][0]                
__________________________________________________________________________________________________
block3a_expand_bn (BatchNormali (None, 75, 110, 192) 768         block3a_expand_conv[0][0]        
__________________________________________________________________________________________________
block3a_expand_activation (Acti (None, 75, 110, 192) 0           block3a_expand_bn[0][0]          
__________________________________________________________________________________________________
block3a_dwconv_pad (ZeroPadding (None, 79, 113, 192) 0           block3a_expand_activation[0][0]  
__________________________________________________________________________________________________
block3a_dwconv (DepthwiseConv2D (None, 38, 55, 192)  4800        block3a_dwconv_pad[0][0]         
__________________________________________________________________________________________________
block3a_bn (BatchNormalization) (None, 38, 55, 192)  768         block3a_dwconv[0][0]             
__________________________________________________________________________________________________
block3a_activation (Activation) (None, 38, 55, 192)  0           block3a_bn[0][0]                 
__________________________________________________________________________________________________
block3a_se_squeeze (GlobalAvera (None, 192)          0           block3a_activation[0][0]         
__________________________________________________________________________________________________
block3a_se_reshape (Reshape)    (None, 1, 1, 192)    0           block3a_se_squeeze[0][0]         
__________________________________________________________________________________________________
block3a_se_reduce (Conv2D)      (None, 1, 1, 8)      1544        block3a_se_reshape[0][0]         
__________________________________________________________________________________________________
block3a_se_expand (Conv2D)      (None, 1, 1, 192)    1728        block3a_se_reduce[0][0]          
__________________________________________________________________________________________________
block3a_se_excite (Multiply)    (None, 38, 55, 192)  0           block3a_activation[0][0]         
                                                                 block3a_se_expand[0][0]          
__________________________________________________________________________________________________
block3a_project_conv (Conv2D)   (None, 38, 55, 48)   9216        block3a_se_excite[0][0]          
__________________________________________________________________________________________________
block3a_project_bn (BatchNormal (None, 38, 55, 48)   192         block3a_project_conv[0][0]       
__________________________________________________________________________________________________
block3b_expand_conv (Conv2D)    (None, 38, 55, 288)  13824       block3a_project_bn[0][0]         
__________________________________________________________________________________________________
block3b_expand_bn (BatchNormali (None, 38, 55, 288)  1152        block3b_expand_conv[0][0]        
__________________________________________________________________________________________________
block3b_expand_activation (Acti (None, 38, 55, 288)  0           block3b_expand_bn[0][0]          
__________________________________________________________________________________________________
block3b_dwconv (DepthwiseConv2D (None, 38, 55, 288)  7200        block3b_expand_activation[0][0]  
__________________________________________________________________________________________________
block3b_bn (BatchNormalization) (None, 38, 55, 288)  1152        block3b_dwconv[0][0]             
__________________________________________________________________________________________________
block3b_activation (Activation) (None, 38, 55, 288)  0           block3b_bn[0][0]                 
__________________________________________________________________________________________________
block3b_se_squeeze (GlobalAvera (None, 288)          0           block3b_activation[0][0]         
__________________________________________________________________________________________________
block3b_se_reshape (Reshape)    (None, 1, 1, 288)    0           block3b_se_squeeze[0][0]         
__________________________________________________________________________________________________
block3b_se_reduce (Conv2D)      (None, 1, 1, 12)     3468        block3b_se_reshape[0][0]         
__________________________________________________________________________________________________
block3b_se_expand (Conv2D)      (None, 1, 1, 288)    3744        block3b_se_reduce[0][0]          
__________________________________________________________________________________________________
block3b_se_excite (Multiply)    (None, 38, 55, 288)  0           block3b_activation[0][0]         
                                                                 block3b_se_expand[0][0]          
__________________________________________________________________________________________________
block3b_project_conv (Conv2D)   (None, 38, 55, 48)   13824       block3b_se_excite[0][0]          
__________________________________________________________________________________________________
block3b_project_bn (BatchNormal (None, 38, 55, 48)   192         block3b_project_conv[0][0]       
__________________________________________________________________________________________________
block3b_drop (Dropout)          (None, 38, 55, 48)   0           block3b_project_bn[0][0]         
__________________________________________________________________________________________________
block3b_add (Add)               (None, 38, 55, 48)   0           block3b_drop[0][0]               
                                                                 block3a_project_bn[0][0]         
__________________________________________________________________________________________________
block3c_expand_conv (Conv2D)    (None, 38, 55, 288)  13824       block3b_add[0][0]                
__________________________________________________________________________________________________
block3c_expand_bn (BatchNormali (None, 38, 55, 288)  1152        block3c_expand_conv[0][0]        
__________________________________________________________________________________________________
block3c_expand_activation (Acti (None, 38, 55, 288)  0           block3c_expand_bn[0][0]          
__________________________________________________________________________________________________
block3c_dwconv (DepthwiseConv2D (None, 38, 55, 288)  7200        block3c_expand_activation[0][0]  
__________________________________________________________________________________________________
block3c_bn (BatchNormalization) (None, 38, 55, 288)  1152        block3c_dwconv[0][0]             
__________________________________________________________________________________________________
block3c_activation (Activation) (None, 38, 55, 288)  0           block3c_bn[0][0]                 
__________________________________________________________________________________________________
block3c_se_squeeze (GlobalAvera (None, 288)          0           block3c_activation[0][0]         
__________________________________________________________________________________________________
block3c_se_reshape (Reshape)    (None, 1, 1, 288)    0           block3c_se_squeeze[0][0]         
__________________________________________________________________________________________________
block3c_se_reduce (Conv2D)      (None, 1, 1, 12)     3468        block3c_se_reshape[0][0]         
__________________________________________________________________________________________________
block3c_se_expand (Conv2D)      (None, 1, 1, 288)    3744        block3c_se_reduce[0][0]          
__________________________________________________________________________________________________
block3c_se_excite (Multiply)    (None, 38, 55, 288)  0           block3c_activation[0][0]         
                                                                 block3c_se_expand[0][0]          
__________________________________________________________________________________________________
block3c_project_conv (Conv2D)   (None, 38, 55, 48)   13824       block3c_se_excite[0][0]          
__________________________________________________________________________________________________
block3c_project_bn (BatchNormal (None, 38, 55, 48)   192         block3c_project_conv[0][0]       
__________________________________________________________________________________________________
block3c_drop (Dropout)          (None, 38, 55, 48)   0           block3c_project_bn[0][0]         
__________________________________________________________________________________________________
block3c_add (Add)               (None, 38, 55, 48)   0           block3c_drop[0][0]               
                                                                 block3b_add[0][0]                
__________________________________________________________________________________________________
block4a_expand_conv (Conv2D)    (None, 38, 55, 288)  13824       block3c_add[0][0]                
__________________________________________________________________________________________________
block4a_expand_bn (BatchNormali (None, 38, 55, 288)  1152        block4a_expand_conv[0][0]        
__________________________________________________________________________________________________
block4a_expand_activation (Acti (None, 38, 55, 288)  0           block4a_expand_bn[0][0]          
__________________________________________________________________________________________________
block4a_dwconv_pad (ZeroPadding (None, 39, 57, 288)  0           block4a_expand_activation[0][0]  
__________________________________________________________________________________________________
block4a_dwconv (DepthwiseConv2D (None, 19, 28, 288)  2592        block4a_dwconv_pad[0][0]         
__________________________________________________________________________________________________
block4a_bn (BatchNormalization) (None, 19, 28, 288)  1152        block4a_dwconv[0][0]             
__________________________________________________________________________________________________
block4a_activation (Activation) (None, 19, 28, 288)  0           block4a_bn[0][0]                 
__________________________________________________________________________________________________
block4a_se_squeeze (GlobalAvera (None, 288)          0           block4a_activation[0][0]         
__________________________________________________________________________________________________
block4a_se_reshape (Reshape)    (None, 1, 1, 288)    0           block4a_se_squeeze[0][0]         
__________________________________________________________________________________________________
block4a_se_reduce (Conv2D)      (None, 1, 1, 12)     3468        block4a_se_reshape[0][0]         
__________________________________________________________________________________________________
block4a_se_expand (Conv2D)      (None, 1, 1, 288)    3744        block4a_se_reduce[0][0]          
__________________________________________________________________________________________________
block4a_se_excite (Multiply)    (None, 19, 28, 288)  0           block4a_activation[0][0]         
                                                                 block4a_se_expand[0][0]          
__________________________________________________________________________________________________
block4a_project_conv (Conv2D)   (None, 19, 28, 96)   27648       block4a_se_excite[0][0]          
__________________________________________________________________________________________________
block4a_project_bn (BatchNormal (None, 19, 28, 96)   384         block4a_project_conv[0][0]       
__________________________________________________________________________________________________
block4b_expand_conv (Conv2D)    (None, 19, 28, 576)  55296       block4a_project_bn[0][0]         
__________________________________________________________________________________________________
block4b_expand_bn (BatchNormali (None, 19, 28, 576)  2304        block4b_expand_conv[0][0]        
__________________________________________________________________________________________________
block4b_expand_activation (Acti (None, 19, 28, 576)  0           block4b_expand_bn[0][0]          
__________________________________________________________________________________________________
block4b_dwconv (DepthwiseConv2D (None, 19, 28, 576)  5184        block4b_expand_activation[0][0]  
__________________________________________________________________________________________________
block4b_bn (BatchNormalization) (None, 19, 28, 576)  2304        block4b_dwconv[0][0]             
__________________________________________________________________________________________________
block4b_activation (Activation) (None, 19, 28, 576)  0           block4b_bn[0][0]                 
__________________________________________________________________________________________________
block4b_se_squeeze (GlobalAvera (None, 576)          0           block4b_activation[0][0]         
__________________________________________________________________________________________________
block4b_se_reshape (Reshape)    (None, 1, 1, 576)    0           block4b_se_squeeze[0][0]         
__________________________________________________________________________________________________
block4b_se_reduce (Conv2D)      (None, 1, 1, 24)     13848       block4b_se_reshape[0][0]         
__________________________________________________________________________________________________
block4b_se_expand (Conv2D)      (None, 1, 1, 576)    14400       block4b_se_reduce[0][0]          
__________________________________________________________________________________________________
block4b_se_excite (Multiply)    (None, 19, 28, 576)  0           block4b_activation[0][0]         
                                                                 block4b_se_expand[0][0]          
__________________________________________________________________________________________________
block4b_project_conv (Conv2D)   (None, 19, 28, 96)   55296       block4b_se_excite[0][0]          
__________________________________________________________________________________________________
block4b_project_bn (BatchNormal (None, 19, 28, 96)   384         block4b_project_conv[0][0]       
__________________________________________________________________________________________________
block4b_drop (Dropout)          (None, 19, 28, 96)   0           block4b_project_bn[0][0]         
__________________________________________________________________________________________________
block4b_add (Add)               (None, 19, 28, 96)   0           block4b_drop[0][0]               
                                                                 block4a_project_bn[0][0]         
__________________________________________________________________________________________________
block4c_expand_conv (Conv2D)    (None, 19, 28, 576)  55296       block4b_add[0][0]                
__________________________________________________________________________________________________
block4c_expand_bn (BatchNormali (None, 19, 28, 576)  2304        block4c_expand_conv[0][0]        
__________________________________________________________________________________________________
block4c_expand_activation (Acti (None, 19, 28, 576)  0           block4c_expand_bn[0][0]          
__________________________________________________________________________________________________
block4c_dwconv (DepthwiseConv2D (None, 19, 28, 576)  5184        block4c_expand_activation[0][0]  
__________________________________________________________________________________________________
block4c_bn (BatchNormalization) (None, 19, 28, 576)  2304        block4c_dwconv[0][0]             
__________________________________________________________________________________________________
block4c_activation (Activation) (None, 19, 28, 576)  0           block4c_bn[0][0]                 
__________________________________________________________________________________________________
block4c_se_squeeze (GlobalAvera (None, 576)          0           block4c_activation[0][0]         
__________________________________________________________________________________________________
block4c_se_reshape (Reshape)    (None, 1, 1, 576)    0           block4c_se_squeeze[0][0]         
__________________________________________________________________________________________________
block4c_se_reduce (Conv2D)      (None, 1, 1, 24)     13848       block4c_se_reshape[0][0]         
__________________________________________________________________________________________________
block4c_se_expand (Conv2D)      (None, 1, 1, 576)    14400       block4c_se_reduce[0][0]          
__________________________________________________________________________________________________
block4c_se_excite (Multiply)    (None, 19, 28, 576)  0           block4c_activation[0][0]         
                                                                 block4c_se_expand[0][0]          
__________________________________________________________________________________________________
block4c_project_conv (Conv2D)   (None, 19, 28, 96)   55296       block4c_se_excite[0][0]          
__________________________________________________________________________________________________
block4c_project_bn (BatchNormal (None, 19, 28, 96)   384         block4c_project_conv[0][0]       
__________________________________________________________________________________________________
block4c_drop (Dropout)          (None, 19, 28, 96)   0           block4c_project_bn[0][0]         
__________________________________________________________________________________________________
block4c_add (Add)               (None, 19, 28, 96)   0           block4c_drop[0][0]               
                                                                 block4b_add[0][0]                
__________________________________________________________________________________________________
block4d_expand_conv (Conv2D)    (None, 19, 28, 576)  55296       block4c_add[0][0]                
__________________________________________________________________________________________________
block4d_expand_bn (BatchNormali (None, 19, 28, 576)  2304        block4d_expand_conv[0][0]        
__________________________________________________________________________________________________
block4d_expand_activation (Acti (None, 19, 28, 576)  0           block4d_expand_bn[0][0]          
__________________________________________________________________________________________________
block4d_dwconv (DepthwiseConv2D (None, 19, 28, 576)  5184        block4d_expand_activation[0][0]  
__________________________________________________________________________________________________
block4d_bn (BatchNormalization) (None, 19, 28, 576)  2304        block4d_dwconv[0][0]             
__________________________________________________________________________________________________
block4d_activation (Activation) (None, 19, 28, 576)  0           block4d_bn[0][0]                 
__________________________________________________________________________________________________
block4d_se_squeeze (GlobalAvera (None, 576)          0           block4d_activation[0][0]         
__________________________________________________________________________________________________
block4d_se_reshape (Reshape)    (None, 1, 1, 576)    0           block4d_se_squeeze[0][0]         
__________________________________________________________________________________________________
block4d_se_reduce (Conv2D)      (None, 1, 1, 24)     13848       block4d_se_reshape[0][0]         
__________________________________________________________________________________________________
block4d_se_expand (Conv2D)      (None, 1, 1, 576)    14400       block4d_se_reduce[0][0]          
__________________________________________________________________________________________________
block4d_se_excite (Multiply)    (None, 19, 28, 576)  0           block4d_activation[0][0]         
                                                                 block4d_se_expand[0][0]          
__________________________________________________________________________________________________
block4d_project_conv (Conv2D)   (None, 19, 28, 96)   55296       block4d_se_excite[0][0]          
__________________________________________________________________________________________________
block4d_project_bn (BatchNormal (None, 19, 28, 96)   384         block4d_project_conv[0][0]       
__________________________________________________________________________________________________
block4d_drop (Dropout)          (None, 19, 28, 96)   0           block4d_project_bn[0][0]         
__________________________________________________________________________________________________
block4d_add (Add)               (None, 19, 28, 96)   0           block4d_drop[0][0]               
                                                                 block4c_add[0][0]                
__________________________________________________________________________________________________
block4e_expand_conv (Conv2D)    (None, 19, 28, 576)  55296       block4d_add[0][0]                
__________________________________________________________________________________________________
block4e_expand_bn (BatchNormali (None, 19, 28, 576)  2304        block4e_expand_conv[0][0]        
__________________________________________________________________________________________________
block4e_expand_activation (Acti (None, 19, 28, 576)  0           block4e_expand_bn[0][0]          
__________________________________________________________________________________________________
block4e_dwconv (DepthwiseConv2D (None, 19, 28, 576)  5184        block4e_expand_activation[0][0]  
__________________________________________________________________________________________________
block4e_bn (BatchNormalization) (None, 19, 28, 576)  2304        block4e_dwconv[0][0]             
__________________________________________________________________________________________________
block4e_activation (Activation) (None, 19, 28, 576)  0           block4e_bn[0][0]                 
__________________________________________________________________________________________________
block4e_se_squeeze (GlobalAvera (None, 576)          0           block4e_activation[0][0]         
__________________________________________________________________________________________________
block4e_se_reshape (Reshape)    (None, 1, 1, 576)    0           block4e_se_squeeze[0][0]         
__________________________________________________________________________________________________
block4e_se_reduce (Conv2D)      (None, 1, 1, 24)     13848       block4e_se_reshape[0][0]         
__________________________________________________________________________________________________
block4e_se_expand (Conv2D)      (None, 1, 1, 576)    14400       block4e_se_reduce[0][0]          
__________________________________________________________________________________________________
block4e_se_excite (Multiply)    (None, 19, 28, 576)  0           block4e_activation[0][0]         
                                                                 block4e_se_expand[0][0]          
__________________________________________________________________________________________________
block4e_project_conv (Conv2D)   (None, 19, 28, 96)   55296       block4e_se_excite[0][0]          
__________________________________________________________________________________________________
block4e_project_bn (BatchNormal (None, 19, 28, 96)   384         block4e_project_conv[0][0]       
__________________________________________________________________________________________________
block4e_drop (Dropout)          (None, 19, 28, 96)   0           block4e_project_bn[0][0]         
__________________________________________________________________________________________________
block4e_add (Add)               (None, 19, 28, 96)   0           block4e_drop[0][0]               
                                                                 block4d_add[0][0]                
__________________________________________________________________________________________________
block5a_expand_conv (Conv2D)    (None, 19, 28, 576)  55296       block4e_add[0][0]                
__________________________________________________________________________________________________
block5a_expand_bn (BatchNormali (None, 19, 28, 576)  2304        block5a_expand_conv[0][0]        
__________________________________________________________________________________________________
block5a_expand_activation (Acti (None, 19, 28, 576)  0           block5a_expand_bn[0][0]          
__________________________________________________________________________________________________
block5a_dwconv (DepthwiseConv2D (None, 19, 28, 576)  14400       block5a_expand_activation[0][0]  
__________________________________________________________________________________________________
block5a_bn (BatchNormalization) (None, 19, 28, 576)  2304        block5a_dwconv[0][0]             
__________________________________________________________________________________________________
block5a_activation (Activation) (None, 19, 28, 576)  0           block5a_bn[0][0]                 
__________________________________________________________________________________________________
block5a_se_squeeze (GlobalAvera (None, 576)          0           block5a_activation[0][0]         
__________________________________________________________________________________________________
block5a_se_reshape (Reshape)    (None, 1, 1, 576)    0           block5a_se_squeeze[0][0]         
__________________________________________________________________________________________________
block5a_se_reduce (Conv2D)      (None, 1, 1, 24)     13848       block5a_se_reshape[0][0]         
__________________________________________________________________________________________________
block5a_se_expand (Conv2D)      (None, 1, 1, 576)    14400       block5a_se_reduce[0][0]          
__________________________________________________________________________________________________
block5a_se_excite (Multiply)    (None, 19, 28, 576)  0           block5a_activation[0][0]         
                                                                 block5a_se_expand[0][0]          
__________________________________________________________________________________________________
block5a_project_conv (Conv2D)   (None, 19, 28, 136)  78336       block5a_se_excite[0][0]          
__________________________________________________________________________________________________
block5a_project_bn (BatchNormal (None, 19, 28, 136)  544         block5a_project_conv[0][0]       
__________________________________________________________________________________________________
block5b_expand_conv (Conv2D)    (None, 19, 28, 816)  110976      block5a_project_bn[0][0]         
__________________________________________________________________________________________________
block5b_expand_bn (BatchNormali (None, 19, 28, 816)  3264        block5b_expand_conv[0][0]        
__________________________________________________________________________________________________
block5b_expand_activation (Acti (None, 19, 28, 816)  0           block5b_expand_bn[0][0]          
__________________________________________________________________________________________________
block5b_dwconv (DepthwiseConv2D (None, 19, 28, 816)  20400       block5b_expand_activation[0][0]  
__________________________________________________________________________________________________
block5b_bn (BatchNormalization) (None, 19, 28, 816)  3264        block5b_dwconv[0][0]             
__________________________________________________________________________________________________
block5b_activation (Activation) (None, 19, 28, 816)  0           block5b_bn[0][0]                 
__________________________________________________________________________________________________
block5b_se_squeeze (GlobalAvera (None, 816)          0           block5b_activation[0][0]         
__________________________________________________________________________________________________
block5b_se_reshape (Reshape)    (None, 1, 1, 816)    0           block5b_se_squeeze[0][0]         
__________________________________________________________________________________________________
block5b_se_reduce (Conv2D)      (None, 1, 1, 34)     27778       block5b_se_reshape[0][0]         
__________________________________________________________________________________________________
block5b_se_expand (Conv2D)      (None, 1, 1, 816)    28560       block5b_se_reduce[0][0]          
__________________________________________________________________________________________________
block5b_se_excite (Multiply)    (None, 19, 28, 816)  0           block5b_activation[0][0]         
                                                                 block5b_se_expand[0][0]          
__________________________________________________________________________________________________
block5b_project_conv (Conv2D)   (None, 19, 28, 136)  110976      block5b_se_excite[0][0]          
__________________________________________________________________________________________________
block5b_project_bn (BatchNormal (None, 19, 28, 136)  544         block5b_project_conv[0][0]       
__________________________________________________________________________________________________
block5b_drop (Dropout)          (None, 19, 28, 136)  0           block5b_project_bn[0][0]         
__________________________________________________________________________________________________
block5b_add (Add)               (None, 19, 28, 136)  0           block5b_drop[0][0]               
                                                                 block5a_project_bn[0][0]         
__________________________________________________________________________________________________
block5c_expand_conv (Conv2D)    (None, 19, 28, 816)  110976      block5b_add[0][0]                
__________________________________________________________________________________________________
block5c_expand_bn (BatchNormali (None, 19, 28, 816)  3264        block5c_expand_conv[0][0]        
__________________________________________________________________________________________________
block5c_expand_activation (Acti (None, 19, 28, 816)  0           block5c_expand_bn[0][0]          
__________________________________________________________________________________________________
block5c_dwconv (DepthwiseConv2D (None, 19, 28, 816)  20400       block5c_expand_activation[0][0]  
__________________________________________________________________________________________________
block5c_bn (BatchNormalization) (None, 19, 28, 816)  3264        block5c_dwconv[0][0]             
__________________________________________________________________________________________________
block5c_activation (Activation) (None, 19, 28, 816)  0           block5c_bn[0][0]                 
__________________________________________________________________________________________________
block5c_se_squeeze (GlobalAvera (None, 816)          0           block5c_activation[0][0]         
__________________________________________________________________________________________________
block5c_se_reshape (Reshape)    (None, 1, 1, 816)    0           block5c_se_squeeze[0][0]         
__________________________________________________________________________________________________
block5c_se_reduce (Conv2D)      (None, 1, 1, 34)     27778       block5c_se_reshape[0][0]         
__________________________________________________________________________________________________
block5c_se_expand (Conv2D)      (None, 1, 1, 816)    28560       block5c_se_reduce[0][0]          
__________________________________________________________________________________________________
block5c_se_excite (Multiply)    (None, 19, 28, 816)  0           block5c_activation[0][0]         
                                                                 block5c_se_expand[0][0]          
__________________________________________________________________________________________________
block5c_project_conv (Conv2D)   (None, 19, 28, 136)  110976      block5c_se_excite[0][0]          
__________________________________________________________________________________________________
block5c_project_bn (BatchNormal (None, 19, 28, 136)  544         block5c_project_conv[0][0]       
__________________________________________________________________________________________________
block5c_drop (Dropout)          (None, 19, 28, 136)  0           block5c_project_bn[0][0]         
__________________________________________________________________________________________________
block5c_add (Add)               (None, 19, 28, 136)  0           block5c_drop[0][0]               
                                                                 block5b_add[0][0]                
__________________________________________________________________________________________________
block5d_expand_conv (Conv2D)    (None, 19, 28, 816)  110976      block5c_add[0][0]                
__________________________________________________________________________________________________
block5d_expand_bn (BatchNormali (None, 19, 28, 816)  3264        block5d_expand_conv[0][0]        
__________________________________________________________________________________________________
block5d_expand_activation (Acti (None, 19, 28, 816)  0           block5d_expand_bn[0][0]          
__________________________________________________________________________________________________
block5d_dwconv (DepthwiseConv2D (None, 19, 28, 816)  20400       block5d_expand_activation[0][0]  
__________________________________________________________________________________________________
block5d_bn (BatchNormalization) (None, 19, 28, 816)  3264        block5d_dwconv[0][0]             
__________________________________________________________________________________________________
block5d_activation (Activation) (None, 19, 28, 816)  0           block5d_bn[0][0]                 
__________________________________________________________________________________________________
block5d_se_squeeze (GlobalAvera (None, 816)          0           block5d_activation[0][0]         
__________________________________________________________________________________________________
block5d_se_reshape (Reshape)    (None, 1, 1, 816)    0           block5d_se_squeeze[0][0]         
__________________________________________________________________________________________________
block5d_se_reduce (Conv2D)      (None, 1, 1, 34)     27778       block5d_se_reshape[0][0]         
__________________________________________________________________________________________________
block5d_se_expand (Conv2D)      (None, 1, 1, 816)    28560       block5d_se_reduce[0][0]          
__________________________________________________________________________________________________
block5d_se_excite (Multiply)    (None, 19, 28, 816)  0           block5d_activation[0][0]         
                                                                 block5d_se_expand[0][0]          
__________________________________________________________________________________________________
block5d_project_conv (Conv2D)   (None, 19, 28, 136)  110976      block5d_se_excite[0][0]          
__________________________________________________________________________________________________
block5d_project_bn (BatchNormal (None, 19, 28, 136)  544         block5d_project_conv[0][0]       
__________________________________________________________________________________________________
block5d_drop (Dropout)          (None, 19, 28, 136)  0           block5d_project_bn[0][0]         
__________________________________________________________________________________________________
block5d_add (Add)               (None, 19, 28, 136)  0           block5d_drop[0][0]               
                                                                 block5c_add[0][0]                
__________________________________________________________________________________________________
block5e_expand_conv (Conv2D)    (None, 19, 28, 816)  110976      block5d_add[0][0]                
__________________________________________________________________________________________________
block5e_expand_bn (BatchNormali (None, 19, 28, 816)  3264        block5e_expand_conv[0][0]        
__________________________________________________________________________________________________
block5e_expand_activation (Acti (None, 19, 28, 816)  0           block5e_expand_bn[0][0]          
__________________________________________________________________________________________________
block5e_dwconv (DepthwiseConv2D (None, 19, 28, 816)  20400       block5e_expand_activation[0][0]  
__________________________________________________________________________________________________
block5e_bn (BatchNormalization) (None, 19, 28, 816)  3264        block5e_dwconv[0][0]             
__________________________________________________________________________________________________
block5e_activation (Activation) (None, 19, 28, 816)  0           block5e_bn[0][0]                 
__________________________________________________________________________________________________
block5e_se_squeeze (GlobalAvera (None, 816)          0           block5e_activation[0][0]         
__________________________________________________________________________________________________
block5e_se_reshape (Reshape)    (None, 1, 1, 816)    0           block5e_se_squeeze[0][0]         
__________________________________________________________________________________________________
block5e_se_reduce (Conv2D)      (None, 1, 1, 34)     27778       block5e_se_reshape[0][0]         
__________________________________________________________________________________________________
block5e_se_expand (Conv2D)      (None, 1, 1, 816)    28560       block5e_se_reduce[0][0]          
__________________________________________________________________________________________________
block5e_se_excite (Multiply)    (None, 19, 28, 816)  0           block5e_activation[0][0]         
                                                                 block5e_se_expand[0][0]          
__________________________________________________________________________________________________
block5e_project_conv (Conv2D)   (None, 19, 28, 136)  110976      block5e_se_excite[0][0]          
__________________________________________________________________________________________________
block5e_project_bn (BatchNormal (None, 19, 28, 136)  544         block5e_project_conv[0][0]       
__________________________________________________________________________________________________
block5e_drop (Dropout)          (None, 19, 28, 136)  0           block5e_project_bn[0][0]         
__________________________________________________________________________________________________
block5e_add (Add)               (None, 19, 28, 136)  0           block5e_drop[0][0]               
                                                                 block5d_add[0][0]                
__________________________________________________________________________________________________
block6a_expand_conv (Conv2D)    (None, 19, 28, 816)  110976      block5e_add[0][0]                
__________________________________________________________________________________________________
block6a_expand_bn (BatchNormali (None, 19, 28, 816)  3264        block6a_expand_conv[0][0]        
__________________________________________________________________________________________________
block6a_expand_activation (Acti (None, 19, 28, 816)  0           block6a_expand_bn[0][0]          
__________________________________________________________________________________________________
block6a_dwconv_pad (ZeroPadding (None, 23, 31, 816)  0           block6a_expand_activation[0][0]  
__________________________________________________________________________________________________
block6a_dwconv (DepthwiseConv2D (None, 10, 14, 816)  20400       block6a_dwconv_pad[0][0]         
__________________________________________________________________________________________________
block6a_bn (BatchNormalization) (None, 10, 14, 816)  3264        block6a_dwconv[0][0]             
__________________________________________________________________________________________________
block6a_activation (Activation) (None, 10, 14, 816)  0           block6a_bn[0][0]                 
__________________________________________________________________________________________________
block6a_se_squeeze (GlobalAvera (None, 816)          0           block6a_activation[0][0]         
__________________________________________________________________________________________________
block6a_se_reshape (Reshape)    (None, 1, 1, 816)    0           block6a_se_squeeze[0][0]         
__________________________________________________________________________________________________
block6a_se_reduce (Conv2D)      (None, 1, 1, 34)     27778       block6a_se_reshape[0][0]         
__________________________________________________________________________________________________
block6a_se_expand (Conv2D)      (None, 1, 1, 816)    28560       block6a_se_reduce[0][0]          
__________________________________________________________________________________________________
block6a_se_excite (Multiply)    (None, 10, 14, 816)  0           block6a_activation[0][0]         
                                                                 block6a_se_expand[0][0]          
__________________________________________________________________________________________________
block6a_project_conv (Conv2D)   (None, 10, 14, 232)  189312      block6a_se_excite[0][0]          
__________________________________________________________________________________________________
block6a_project_bn (BatchNormal (None, 10, 14, 232)  928         block6a_project_conv[0][0]       
__________________________________________________________________________________________________
block6b_expand_conv (Conv2D)    (None, 10, 14, 1392) 322944      block6a_project_bn[0][0]         
__________________________________________________________________________________________________
block6b_expand_bn (BatchNormali (None, 10, 14, 1392) 5568        block6b_expand_conv[0][0]        
__________________________________________________________________________________________________
block6b_expand_activation (Acti (None, 10, 14, 1392) 0           block6b_expand_bn[0][0]          
__________________________________________________________________________________________________
block6b_dwconv (DepthwiseConv2D (None, 10, 14, 1392) 34800       block6b_expand_activation[0][0]  
__________________________________________________________________________________________________
block6b_bn (BatchNormalization) (None, 10, 14, 1392) 5568        block6b_dwconv[0][0]             
__________________________________________________________________________________________________
block6b_activation (Activation) (None, 10, 14, 1392) 0           block6b_bn[0][0]                 
__________________________________________________________________________________________________
block6b_se_squeeze (GlobalAvera (None, 1392)         0           block6b_activation[0][0]         
__________________________________________________________________________________________________
block6b_se_reshape (Reshape)    (None, 1, 1, 1392)   0           block6b_se_squeeze[0][0]         
__________________________________________________________________________________________________
block6b_se_reduce (Conv2D)      (None, 1, 1, 58)     80794       block6b_se_reshape[0][0]         
__________________________________________________________________________________________________
block6b_se_expand (Conv2D)      (None, 1, 1, 1392)   82128       block6b_se_reduce[0][0]          
__________________________________________________________________________________________________
block6b_se_excite (Multiply)    (None, 10, 14, 1392) 0           block6b_activation[0][0]         
                                                                 block6b_se_expand[0][0]          
__________________________________________________________________________________________________
block6b_project_conv (Conv2D)   (None, 10, 14, 232)  322944      block6b_se_excite[0][0]          
__________________________________________________________________________________________________
block6b_project_bn (BatchNormal (None, 10, 14, 232)  928         block6b_project_conv[0][0]       
__________________________________________________________________________________________________
block6b_drop (Dropout)          (None, 10, 14, 232)  0           block6b_project_bn[0][0]         
__________________________________________________________________________________________________
block6b_add (Add)               (None, 10, 14, 232)  0           block6b_drop[0][0]               
                                                                 block6a_project_bn[0][0]         
__________________________________________________________________________________________________
block6c_expand_conv (Conv2D)    (None, 10, 14, 1392) 322944      block6b_add[0][0]                
__________________________________________________________________________________________________
block6c_expand_bn (BatchNormali (None, 10, 14, 1392) 5568        block6c_expand_conv[0][0]        
__________________________________________________________________________________________________
block6c_expand_activation (Acti (None, 10, 14, 1392) 0           block6c_expand_bn[0][0]          
__________________________________________________________________________________________________
block6c_dwconv (DepthwiseConv2D (None, 10, 14, 1392) 34800       block6c_expand_activation[0][0]  
__________________________________________________________________________________________________
block6c_bn (BatchNormalization) (None, 10, 14, 1392) 5568        block6c_dwconv[0][0]             
__________________________________________________________________________________________________
block6c_activation (Activation) (None, 10, 14, 1392) 0           block6c_bn[0][0]                 
__________________________________________________________________________________________________
block6c_se_squeeze (GlobalAvera (None, 1392)         0           block6c_activation[0][0]         
__________________________________________________________________________________________________
block6c_se_reshape (Reshape)    (None, 1, 1, 1392)   0           block6c_se_squeeze[0][0]         
__________________________________________________________________________________________________
block6c_se_reduce (Conv2D)      (None, 1, 1, 58)     80794       block6c_se_reshape[0][0]         
__________________________________________________________________________________________________
block6c_se_expand (Conv2D)      (None, 1, 1, 1392)   82128       block6c_se_reduce[0][0]          
__________________________________________________________________________________________________
block6c_se_excite (Multiply)    (None, 10, 14, 1392) 0           block6c_activation[0][0]         
                                                                 block6c_se_expand[0][0]          
__________________________________________________________________________________________________
block6c_project_conv (Conv2D)   (None, 10, 14, 232)  322944      block6c_se_excite[0][0]          
__________________________________________________________________________________________________
block6c_project_bn (BatchNormal (None, 10, 14, 232)  928         block6c_project_conv[0][0]       
__________________________________________________________________________________________________
block6c_drop (Dropout)          (None, 10, 14, 232)  0           block6c_project_bn[0][0]         
__________________________________________________________________________________________________
block6c_add (Add)               (None, 10, 14, 232)  0           block6c_drop[0][0]               
                                                                 block6b_add[0][0]                
__________________________________________________________________________________________________
block6d_expand_conv (Conv2D)    (None, 10, 14, 1392) 322944      block6c_add[0][0]                
__________________________________________________________________________________________________
block6d_expand_bn (BatchNormali (None, 10, 14, 1392) 5568        block6d_expand_conv[0][0]        
__________________________________________________________________________________________________
block6d_expand_activation (Acti (None, 10, 14, 1392) 0           block6d_expand_bn[0][0]          
__________________________________________________________________________________________________
block6d_dwconv (DepthwiseConv2D (None, 10, 14, 1392) 34800       block6d_expand_activation[0][0]  
__________________________________________________________________________________________________
block6d_bn (BatchNormalization) (None, 10, 14, 1392) 5568        block6d_dwconv[0][0]             
__________________________________________________________________________________________________
block6d_activation (Activation) (None, 10, 14, 1392) 0           block6d_bn[0][0]                 
__________________________________________________________________________________________________
block6d_se_squeeze (GlobalAvera (None, 1392)         0           block6d_activation[0][0]         
__________________________________________________________________________________________________
block6d_se_reshape (Reshape)    (None, 1, 1, 1392)   0           block6d_se_squeeze[0][0]         
__________________________________________________________________________________________________
block6d_se_reduce (Conv2D)      (None, 1, 1, 58)     80794       block6d_se_reshape[0][0]         
__________________________________________________________________________________________________
block6d_se_expand (Conv2D)      (None, 1, 1, 1392)   82128       block6d_se_reduce[0][0]          
__________________________________________________________________________________________________
block6d_se_excite (Multiply)    (None, 10, 14, 1392) 0           block6d_activation[0][0]         
                                                                 block6d_se_expand[0][0]          
__________________________________________________________________________________________________
block6d_project_conv (Conv2D)   (None, 10, 14, 232)  322944      block6d_se_excite[0][0]          
__________________________________________________________________________________________________
block6d_project_bn (BatchNormal (None, 10, 14, 232)  928         block6d_project_conv[0][0]       
__________________________________________________________________________________________________
block6d_drop (Dropout)          (None, 10, 14, 232)  0           block6d_project_bn[0][0]         
__________________________________________________________________________________________________
block6d_add (Add)               (None, 10, 14, 232)  0           block6d_drop[0][0]               
                                                                 block6c_add[0][0]                
__________________________________________________________________________________________________
block6e_expand_conv (Conv2D)    (None, 10, 14, 1392) 322944      block6d_add[0][0]                
__________________________________________________________________________________________________
block6e_expand_bn (BatchNormali (None, 10, 14, 1392) 5568        block6e_expand_conv[0][0]        
__________________________________________________________________________________________________
block6e_expand_activation (Acti (None, 10, 14, 1392) 0           block6e_expand_bn[0][0]          
__________________________________________________________________________________________________
block6e_dwconv (DepthwiseConv2D (None, 10, 14, 1392) 34800       block6e_expand_activation[0][0]  
__________________________________________________________________________________________________
block6e_bn (BatchNormalization) (None, 10, 14, 1392) 5568        block6e_dwconv[0][0]             
__________________________________________________________________________________________________
block6e_activation (Activation) (None, 10, 14, 1392) 0           block6e_bn[0][0]                 
__________________________________________________________________________________________________
block6e_se_squeeze (GlobalAvera (None, 1392)         0           block6e_activation[0][0]         
__________________________________________________________________________________________________
block6e_se_reshape (Reshape)    (None, 1, 1, 1392)   0           block6e_se_squeeze[0][0]         
__________________________________________________________________________________________________
block6e_se_reduce (Conv2D)      (None, 1, 1, 58)     80794       block6e_se_reshape[0][0]         
__________________________________________________________________________________________________
block6e_se_expand (Conv2D)      (None, 1, 1, 1392)   82128       block6e_se_reduce[0][0]          
__________________________________________________________________________________________________
block6e_se_excite (Multiply)    (None, 10, 14, 1392) 0           block6e_activation[0][0]         
                                                                 block6e_se_expand[0][0]          
__________________________________________________________________________________________________
block6e_project_conv (Conv2D)   (None, 10, 14, 232)  322944      block6e_se_excite[0][0]          
__________________________________________________________________________________________________
block6e_project_bn (BatchNormal (None, 10, 14, 232)  928         block6e_project_conv[0][0]       
__________________________________________________________________________________________________
block6e_drop (Dropout)          (None, 10, 14, 232)  0           block6e_project_bn[0][0]         
__________________________________________________________________________________________________
block6e_add (Add)               (None, 10, 14, 232)  0           block6e_drop[0][0]               
                                                                 block6d_add[0][0]                
__________________________________________________________________________________________________
block6f_expand_conv (Conv2D)    (None, 10, 14, 1392) 322944      block6e_add[0][0]                
__________________________________________________________________________________________________
block6f_expand_bn (BatchNormali (None, 10, 14, 1392) 5568        block6f_expand_conv[0][0]        
__________________________________________________________________________________________________
block6f_expand_activation (Acti (None, 10, 14, 1392) 0           block6f_expand_bn[0][0]          
__________________________________________________________________________________________________
block6f_dwconv (DepthwiseConv2D (None, 10, 14, 1392) 34800       block6f_expand_activation[0][0]  
__________________________________________________________________________________________________
block6f_bn (BatchNormalization) (None, 10, 14, 1392) 5568        block6f_dwconv[0][0]             
__________________________________________________________________________________________________
block6f_activation (Activation) (None, 10, 14, 1392) 0           block6f_bn[0][0]                 
__________________________________________________________________________________________________
block6f_se_squeeze (GlobalAvera (None, 1392)         0           block6f_activation[0][0]         
__________________________________________________________________________________________________
block6f_se_reshape (Reshape)    (None, 1, 1, 1392)   0           block6f_se_squeeze[0][0]         
__________________________________________________________________________________________________
block6f_se_reduce (Conv2D)      (None, 1, 1, 58)     80794       block6f_se_reshape[0][0]         
__________________________________________________________________________________________________
block6f_se_expand (Conv2D)      (None, 1, 1, 1392)   82128       block6f_se_reduce[0][0]          
__________________________________________________________________________________________________
block6f_se_excite (Multiply)    (None, 10, 14, 1392) 0           block6f_activation[0][0]         
                                                                 block6f_se_expand[0][0]          
__________________________________________________________________________________________________
block6f_project_conv (Conv2D)   (None, 10, 14, 232)  322944      block6f_se_excite[0][0]          
__________________________________________________________________________________________________
block6f_project_bn (BatchNormal (None, 10, 14, 232)  928         block6f_project_conv[0][0]       
__________________________________________________________________________________________________
block6f_drop (Dropout)          (None, 10, 14, 232)  0           block6f_project_bn[0][0]         
__________________________________________________________________________________________________
block6f_add (Add)               (None, 10, 14, 232)  0           block6f_drop[0][0]               
                                                                 block6e_add[0][0]                
__________________________________________________________________________________________________
block7a_expand_conv (Conv2D)    (None, 10, 14, 1392) 322944      block6f_add[0][0]                
__________________________________________________________________________________________________
block7a_expand_bn (BatchNormali (None, 10, 14, 1392) 5568        block7a_expand_conv[0][0]        
__________________________________________________________________________________________________
block7a_expand_activation (Acti (None, 10, 14, 1392) 0           block7a_expand_bn[0][0]          
__________________________________________________________________________________________________
block7a_dwconv (DepthwiseConv2D (None, 10, 14, 1392) 12528       block7a_expand_activation[0][0]  
__________________________________________________________________________________________________
block7a_bn (BatchNormalization) (None, 10, 14, 1392) 5568        block7a_dwconv[0][0]             
__________________________________________________________________________________________________
block7a_activation (Activation) (None, 10, 14, 1392) 0           block7a_bn[0][0]                 
__________________________________________________________________________________________________
block7a_se_squeeze (GlobalAvera (None, 1392)         0           block7a_activation[0][0]         
__________________________________________________________________________________________________
block7a_se_reshape (Reshape)    (None, 1, 1, 1392)   0           block7a_se_squeeze[0][0]         
__________________________________________________________________________________________________
block7a_se_reduce (Conv2D)      (None, 1, 1, 58)     80794       block7a_se_reshape[0][0]         
__________________________________________________________________________________________________
block7a_se_expand (Conv2D)      (None, 1, 1, 1392)   82128       block7a_se_reduce[0][0]          
__________________________________________________________________________________________________
block7a_se_excite (Multiply)    (None, 10, 14, 1392) 0           block7a_activation[0][0]         
                                                                 block7a_se_expand[0][0]          
__________________________________________________________________________________________________
block7a_project_conv (Conv2D)   (None, 10, 14, 384)  534528      block7a_se_excite[0][0]          
__________________________________________________________________________________________________
block7a_project_bn (BatchNormal (None, 10, 14, 384)  1536        block7a_project_conv[0][0]       
__________________________________________________________________________________________________
block7b_expand_conv (Conv2D)    (None, 10, 14, 2304) 884736      block7a_project_bn[0][0]         
__________________________________________________________________________________________________
block7b_expand_bn (BatchNormali (None, 10, 14, 2304) 9216        block7b_expand_conv[0][0]        
__________________________________________________________________________________________________
block7b_expand_activation (Acti (None, 10, 14, 2304) 0           block7b_expand_bn[0][0]          
__________________________________________________________________________________________________
block7b_dwconv (DepthwiseConv2D (None, 10, 14, 2304) 20736       block7b_expand_activation[0][0]  
__________________________________________________________________________________________________
block7b_bn (BatchNormalization) (None, 10, 14, 2304) 9216        block7b_dwconv[0][0]             
__________________________________________________________________________________________________
block7b_activation (Activation) (None, 10, 14, 2304) 0           block7b_bn[0][0]                 
__________________________________________________________________________________________________
block7b_se_squeeze (GlobalAvera (None, 2304)         0           block7b_activation[0][0]         
__________________________________________________________________________________________________
block7b_se_reshape (Reshape)    (None, 1, 1, 2304)   0           block7b_se_squeeze[0][0]         
__________________________________________________________________________________________________
block7b_se_reduce (Conv2D)      (None, 1, 1, 96)     221280      block7b_se_reshape[0][0]         
__________________________________________________________________________________________________
block7b_se_expand (Conv2D)      (None, 1, 1, 2304)   223488      block7b_se_reduce[0][0]          
__________________________________________________________________________________________________
block7b_se_excite (Multiply)    (None, 10, 14, 2304) 0           block7b_activation[0][0]         
                                                                 block7b_se_expand[0][0]          
__________________________________________________________________________________________________
block7b_project_conv (Conv2D)   (None, 10, 14, 384)  884736      block7b_se_excite[0][0]          
__________________________________________________________________________________________________
block7b_project_bn (BatchNormal (None, 10, 14, 384)  1536        block7b_project_conv[0][0]       
__________________________________________________________________________________________________
block7b_drop (Dropout)          (None, 10, 14, 384)  0           block7b_project_bn[0][0]         
__________________________________________________________________________________________________
block7b_add (Add)               (None, 10, 14, 384)  0           block7b_drop[0][0]               
                                                                 block7a_project_bn[0][0]         
__________________________________________________________________________________________________
top_conv (Conv2D)               (None, 10, 14, 1536) 589824      block7b_add[0][0]                
__________________________________________________________________________________________________
top_bn (BatchNormalization)     (None, 10, 14, 1536) 6144        top_conv[0][0]                   
__________________________________________________________________________________________________
top_activation (Activation)     (None, 10, 14, 1536) 0           top_bn[0][0]                     
__________________________________________________________________________________________________
max_pool (GlobalMaxPooling2D)   (None, 1536)         0           top_activation[0][0]             
__________________________________________________________________________________________________
batch_normalization (BatchNorma (None, 1536)         6144        max_pool[0][0]                   
__________________________________________________________________________________________________
dense (Dense)                   (None, 256)          393472      batch_normalization[0][0]        
__________________________________________________________________________________________________
dropout (Dropout)               (None, 256)          0           dense[0][0]                      
__________________________________________________________________________________________________
dense_1 (Dense)                 (None, 4)            1028        dropout[0][0]                    
==================================================================================================
Total params: 11,184,179
Trainable params: 11,093,804
Non-trainable params: 90,375
__________________________________________________________________________________________________
In [19]:
model.save('MP')
In [20]:
!zip -r file.zip /kaggle/working/MP
  adding: kaggle/working/MP/ (stored 0%)
  adding: kaggle/working/MP/variables/ (stored 0%)
  adding: kaggle/working/MP/variables/variables.data-00000-of-00001 (deflated 9%)
  adding: kaggle/working/MP/variables/variables.index (deflated 79%)
  adding: kaggle/working/MP/saved_model.pb (deflated 92%)
  adding: kaggle/working/MP/assets/ (stored 0%)
  adding: kaggle/working/MP/keras_metadata.pb (deflated 96%)
In [21]:
model.layers
Out[21]:
[<keras.engine.input_layer.InputLayer at 0x7c9dba0dddd0>,
 <keras.layers.preprocessing.image_preprocessing.Rescaling at 0x7c9db81e2bd0>,
 <keras.layers.preprocessing.normalization.Normalization at 0x7c9db89f95d0>,
 <keras.layers.convolutional.ZeroPadding2D at 0x7c9db9f84cd0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db81be490>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db83c1c90>,
 <keras.layers.core.Activation at 0x7c9db834eed0>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db8335890>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db833d4d0>,
 <keras.layers.core.Activation at 0x7c9db82d38d0>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db82da850>,
 <keras.layers.core.Reshape at 0x7c9db82ea3d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db82eaf90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db82ef4d0>,
 <keras.layers.merge.Multiply at 0x7c9db8300150>,
 <keras.layers.convolutional.Conv2D at 0x7c9db82efcd0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8289250>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db8292d50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db828eb90>,
 <keras.layers.core.Activation at 0x7c9db82ea750>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db83c1550>,
 <keras.layers.core.Reshape at 0x7c9db8385e90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db831ead0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db829e050>,
 <keras.layers.merge.Multiply at 0x7c9db82a5c90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8305b10>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db82ae290>,
 <keras.layers.core.Dropout at 0x7c9db82b5550>,
 <keras.layers.merge.Add at 0x7c9db831e590>,
 <keras.layers.convolutional.Conv2D at 0x7c9db82bc250>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db81fe350>,
 <keras.layers.core.Activation at 0x7c9db82c6310>,
 <keras.layers.convolutional.ZeroPadding2D at 0x7c9db8a73cd0>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db8212290>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8212f10>,
 <keras.layers.core.Activation at 0x7c9db82065d0>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db8218b90>,
 <keras.layers.core.Reshape at 0x7c9db8225290>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8221cd0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db822cfd0>,
 <keras.layers.merge.Multiply at 0x7c9db817d8d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db822c1d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8187790>,
 <keras.layers.convolutional.Conv2D at 0x7c9db818bad0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db81879d0>,
 <keras.layers.core.Activation at 0x7c9db8199550>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db81a2210>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db81a5710>,
 <keras.layers.core.Activation at 0x7c9db8181e50>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db81b1410>,
 <keras.layers.core.Reshape at 0x7c9db81ba710>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8a2bad0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db813d290>,
 <keras.layers.merge.Multiply at 0x7c9db814ae50>,
 <keras.layers.convolutional.Conv2D at 0x7c9db813d5d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8a21ed0>,
 <keras.layers.core.Dropout at 0x7c9db8163a10>,
 <keras.layers.merge.Add at 0x7c9db815d990>,
 <keras.layers.convolutional.Conv2D at 0x7c9db81a57d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db81ba110>,
 <keras.layers.core.Activation at 0x7c9db821af90>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db822c5d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db82185d0>,
 <keras.layers.core.Activation at 0x7c9db82ea7d0>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db8385650>,
 <keras.layers.core.Reshape at 0x7c9db81705d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8172090>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8178090>,
 <keras.layers.merge.Multiply at 0x7c9db8170fd0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db80fe310>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8108090>,
 <keras.layers.core.Dropout at 0x7c9db80fe5d0>,
 <keras.layers.merge.Add at 0x7c9db810c390>,
 <keras.layers.convolutional.Conv2D at 0x7c9db81151d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db811d750>,
 <keras.layers.core.Activation at 0x7c9db8115250>,
 <keras.layers.convolutional.ZeroPadding2D at 0x7c9db8112210>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db812c690>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db812cfd0>,
 <keras.layers.core.Activation at 0x7c9db810c690>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db8133450>,
 <keras.layers.core.Reshape at 0x7c9db813c6d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8138b90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db80c8050>,
 <keras.layers.merge.Multiply at 0x7c9db80d1cd0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db80d4d90>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db80dac50>,
 <keras.layers.convolutional.Conv2D at 0x7c9db80e4090>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db80eab50>,
 <keras.layers.core.Activation at 0x7c9db80efad0>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db80f7650>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db80fc950>,
 <keras.layers.core.Activation at 0x7c9db80c8650>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db807f890>,
 <keras.layers.core.Reshape at 0x7c9db808f410>,
 <keras.layers.convolutional.Conv2D at 0x7c9db808cdd0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db80967d0>,
 <keras.layers.merge.Multiply at 0x7c9db80a4410>,
 <keras.layers.convolutional.Conv2D at 0x7c9db80f5210>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db80eadd0>,
 <keras.layers.core.Dropout at 0x7c9db80c2290>,
 <keras.layers.merge.Add at 0x7c9db811d9d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8122490>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8096d50>,
 <keras.layers.core.Activation at 0x7c9db81fe1d0>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db813d750>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8138e90>,
 <keras.layers.core.Activation at 0x7c9db80b1410>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db80b3a90>,
 <keras.layers.core.Reshape at 0x7c9db80b5810>,
 <keras.layers.convolutional.Conv2D at 0x7c9db80b1cd0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db80ba0d0>,
 <keras.layers.merge.Multiply at 0x7c9db8046b90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db80b1350>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8050210>,
 <keras.layers.core.Dropout at 0x7c9db8056110>,
 <keras.layers.merge.Add at 0x7c9db8052490>,
 <keras.layers.convolutional.Conv2D at 0x7c9db805c5d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8060f50>,
 <keras.layers.core.Activation at 0x7c9db8066bd0>,
 <keras.layers.convolutional.ZeroPadding2D at 0x7c9db8066290>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db80769d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8079f50>,
 <keras.layers.core.Activation at 0x7c9db0195c90>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db8079b90>,
 <keras.layers.core.Reshape at 0x7c9db01a4090>,
 <keras.layers.convolutional.Conv2D at 0x7c9db01ae850>,
 <keras.layers.convolutional.Conv2D at 0x7c9db01a8590>,
 <keras.layers.merge.Multiply at 0x7c9db01b6250>,
 <keras.layers.convolutional.Conv2D at 0x7c9db01c1550>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db01b6d10>,
 <keras.layers.convolutional.Conv2D at 0x7c9db01ca7d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db015bbd0>,
 <keras.layers.core.Activation at 0x7c9db015b190>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db01b6510>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db016ccd0>,
 <keras.layers.core.Activation at 0x7c9db0165090>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db0162a10>,
 <keras.layers.core.Reshape at 0x7c9db0182150>,
 <keras.layers.convolutional.Conv2D at 0x7c9db01873d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0179410>,
 <keras.layers.merge.Multiply at 0x7c9db01c1a50>,
 <keras.layers.convolutional.Conv2D at 0x7c9db01a8090>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db019ab10>,
 <keras.layers.core.Dropout at 0x7c9db80565d0>,
 <keras.layers.merge.Add at 0x7c9db80ba050>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8130a10>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db804aa50>,
 <keras.layers.core.Activation at 0x7c9db018d190>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db0190a10>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db018e390>,
 <keras.layers.core.Activation at 0x7c9db0117410>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db0117b90>,
 <keras.layers.core.Reshape at 0x7c9db0119c90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db011fe10>,
 <keras.layers.convolutional.Conv2D at 0x7c9db012a090>,
 <keras.layers.merge.Multiply at 0x7c9db0135b50>,
 <keras.layers.convolutional.Conv2D at 0x7c9db018ed50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db013dad0>,
 <keras.layers.core.Dropout at 0x7c9db0147610>,
 <keras.layers.merge.Add at 0x7c9db014b890>,
 <keras.layers.convolutional.Conv2D at 0x7c9db013dc50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db00d4850>,
 <keras.layers.core.Activation at 0x7c9db01171d0>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db00e0290>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db00ea790>,
 <keras.layers.core.Activation at 0x7c9db00f0350>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db00f09d0>,
 <keras.layers.core.Reshape at 0x7c9db00fd250>,
 <keras.layers.convolutional.Conv2D at 0x7c9db01034d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0108390>,
 <keras.layers.merge.Multiply at 0x7c9db0112150>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0113550>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db009e390>,
 <keras.layers.core.Dropout at 0x7c9db0112590>,
 <keras.layers.merge.Add at 0x7c9db00a3950>,
 <keras.layers.convolutional.Conv2D at 0x7c9db00ad110>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db00b2a50>,
 <keras.layers.core.Activation at 0x7c9db00a3750>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db00b8310>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db00c2e50>,
 <keras.layers.core.Activation at 0x7c9db00b8d90>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db00a5390>,
 <keras.layers.core.Reshape at 0x7c9db00d9c90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0151190>,
 <keras.layers.convolutional.Conv2D at 0x7c9db00f0bd0>,
 <keras.layers.merge.Multiply at 0x7c9db0121e90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db01a8990>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db00cc590>,
 <keras.layers.core.Dropout at 0x7c9db00d0e90>,
 <keras.layers.merge.Add at 0x7c9db00d12d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0057b90>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db00d0a10>,
 <keras.layers.core.Activation at 0x7c9db005e090>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db0065890>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db005e750>,
 <keras.layers.core.Activation at 0x7c9db006f310>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db006fc10>,
 <keras.layers.core.Reshape at 0x7c9db0077ed0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0077d90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0083090>,
 <keras.layers.merge.Multiply at 0x7c9db008ea50>,
 <keras.layers.convolutional.Conv2D at 0x7c9db006f5d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db00189d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db001cc10>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db0018c10>,
 <keras.layers.core.Activation at 0x7c9db002f150>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db00333d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db00372d0>,
 <keras.layers.core.Activation at 0x7c9db0024d10>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db0039710>,
 <keras.layers.core.Reshape at 0x7c9db0049990>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0045d90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0051550>,
 <keras.layers.merge.Multiply at 0x7c9da80a1190>,
 <keras.layers.convolutional.Conv2D at 0x7c9db00377d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9da80ad110>,
 <keras.layers.core.Dropout at 0x7c9da80b0c10>,
 <keras.layers.merge.Add at 0x7c9da80b6cd0>,
 <keras.layers.convolutional.Conv2D at 0x7c9da80ade10>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9da80c0e50>,
 <keras.layers.core.Activation at 0x7c9da80c0110>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9da80c8810>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9da80a6d50>,
 <keras.layers.core.Activation at 0x7c9db002a3d0>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db0020390>,
 <keras.layers.core.Reshape at 0x7c9db0077450>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0057590>,
 <keras.layers.convolutional.Conv2D at 0x7c9db00681d0>,
 <keras.layers.merge.Multiply at 0x7c9da80d4790>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0072f50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db0049250>,
 <keras.layers.core.Dropout at 0x7c9da80d4d90>,
 <keras.layers.merge.Add at 0x7c9da80ad910>,
 <keras.layers.convolutional.Conv2D at 0x7c9da8069bd0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9da80d4fd0>,
 <keras.layers.core.Activation at 0x7c9da8069690>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9da805a9d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9da80583d0>,
 <keras.layers.core.Activation at 0x7c9da80ad410>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9da807e510>,
 <keras.layers.core.Reshape at 0x7c9da807e3d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9da8065950>,
 <keras.layers.convolutional.Conv2D at 0x7c9da80698d0>,
 <keras.layers.merge.Multiply at 0x7c9da801b110>,
 <keras.layers.convolutional.Conv2D at 0x7c9da8017ad0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9da8090c50>,
 <keras.layers.core.Dropout at 0x7c9da8090f10>,
 <keras.layers.merge.Add at 0x7c9da801bdd0>,
 <keras.layers.convolutional.Conv2D at 0x7c9da8038cd0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9da8041790>,
 <keras.layers.core.Activation at 0x7c9da8038790>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9da802fe10>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9da8016f90>,
 <keras.layers.core.Activation at 0x7c9da8065050>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9da804fe90>,
 <keras.layers.core.Reshape at 0x7c9da804f4d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9da8045090>,
 <keras.layers.convolutional.Conv2D at 0x7c9da80389d0>,
 <keras.layers.merge.Multiply at 0x7c9d9c7d5090>,
 <keras.layers.convolutional.Conv2D at 0x7c9d9c7d0c50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9d9c7c7dd0>,
 <keras.layers.core.Dropout at 0x7c9d9c7d0050>,
 <keras.layers.merge.Add at 0x7c9da80161d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9d9c7f2e50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9d9c7fa890>,
 <keras.layers.core.Activation at 0x7c9d9c7facd0>,
 <keras.layers.convolutional.ZeroPadding2D at 0x7c9d9c7efad0>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db80ba550>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9da8089050>,
 <keras.layers.core.Activation at 0x7c9db82b5050>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db813d590>,
 <keras.layers.core.Reshape at 0x7c9db82aea90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db822c050>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8133290>,
 <keras.layers.merge.Multiply at 0x7c9db8231890>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8305ad0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db81124d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db01a4ed0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8170dd0>,
 <keras.layers.core.Activation at 0x7c9db811df50>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db0190fd0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db805cf10>,
 <keras.layers.core.Activation at 0x7c9db0121fd0>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db018ded0>,
 <keras.layers.core.Reshape at 0x7c9db804a210>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0140050>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8046fd0>,
 <keras.layers.merge.Multiply at 0x7c9db014b450>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0072610>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9d9c7ea710>,
 <keras.layers.core.Dropout at 0x7c9da80ad650>,
 <keras.layers.merge.Add at 0x7c9da80bcb90>,
 <keras.layers.convolutional.Conv2D at 0x7c9da80c8c50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db01792d0>,
 <keras.layers.core.Activation at 0x7c9db808cc90>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db0187610>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8158310>,
 <keras.layers.core.Activation at 0x7c9db8289790>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db82da290>,
 <keras.layers.core.Reshape at 0x7c9db81e5bd0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db81bef90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8a31fd0>,
 <keras.layers.merge.Multiply at 0x7c9db8c2c150>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8bb0710>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8c08d10>,
 <keras.layers.core.Dropout at 0x7c9db8a82a90>,
 <keras.layers.merge.Add at 0x7c9db8a76f50>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8a88bd0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8a1a610>,
 <keras.layers.core.Activation at 0x7c9db8a764d0>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db8a82310>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db80f7610>,
 <keras.layers.core.Activation at 0x7c9db8292610>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db8a76310>,
 <keras.layers.core.Reshape at 0x7c9db0162110>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8172a50>,
 <keras.layers.convolutional.Conv2D at 0x7c9db811dc10>,
 <keras.layers.merge.Multiply at 0x7c9db8e51d90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8ac82d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8ac1e90>,
 <keras.layers.core.Dropout at 0x7c9db8a58290>,
 <keras.layers.merge.Add at 0x7c9db8a21a50>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8ab0810>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8a21890>,
 <keras.layers.core.Activation at 0x7c9db8aa1cd0>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db8b11490>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8aa14d0>,
 <keras.layers.core.Activation at 0x7c9db9cf0ed0>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db8adf350>,
 <keras.layers.core.Reshape at 0x7c9db8adf510>,
 <keras.layers.convolutional.Conv2D at 0x7c9db908cc50>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8b458d0>,
 <keras.layers.merge.Multiply at 0x7c9db8b56390>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8b01310>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8bb9850>,
 <keras.layers.core.Dropout at 0x7c9db8a43a90>,
 <keras.layers.merge.Add at 0x7c9db8bb9990>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8b89e50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8a51410>,
 <keras.layers.core.Activation at 0x7c9db9e84e90>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db8b935d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db9f69690>,
 <keras.layers.core.Activation at 0x7c9dba19d550>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db8b6d5d0>,
 <keras.layers.core.Reshape at 0x7c9da80812d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9da8070810>,
 <keras.layers.convolutional.Conv2D at 0x7c9db0049a90>,
 <keras.layers.merge.Multiply at 0x7c9db0051f90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db00204d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8a67d90>,
 <keras.layers.core.Dropout at 0x7c9d9c77e250>,
 <keras.layers.merge.Add at 0x7c9dba1875d0>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8bb97d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8adf4d0>,
 <keras.layers.core.Activation at 0x7c9db8b99310>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db8076c50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9d9c7bf1d0>,
 <keras.layers.core.Activation at 0x7c9db8a21650>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db8b56150>,
 <keras.layers.core.Reshape at 0x7c9db8a63290>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8a63d90>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8b99dd0>,
 <keras.layers.merge.Multiply at 0x7c9db8a5a910>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8a64950>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8a7e790>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8aaad50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8ad0990>,
 <keras.layers.core.Activation at 0x7c9db8aaabd0>,
 <keras.layers.convolutional.DepthwiseConv2D at 0x7c9db8abd850>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8aa93d0>,
 <keras.layers.core.Activation at 0x7c9db8a5ac50>,
 <keras.layers.pooling.GlobalAveragePooling2D at 0x7c9db8aa79d0>,
 <keras.layers.core.Reshape at 0x7c9db8a9c810>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8b04810>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8ace510>,
 <keras.layers.merge.Multiply at 0x7c9db8ae8510>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8afc6d0>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8af9a90>,
 <keras.layers.core.Dropout at 0x7c9db8ada250>,
 <keras.layers.merge.Add at 0x7c9db8bcf190>,
 <keras.layers.convolutional.Conv2D at 0x7c9db8385a50>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db8bcfd90>,
 <keras.layers.core.Activation at 0x7c9db8bc4390>,
 <keras.layers.pooling.GlobalMaxPooling2D at 0x7c9db8bbe150>,
 <keras.layers.normalization.batch_normalization.BatchNormalization at 0x7c9db89f9290>,
 <keras.layers.core.Dense at 0x7c9db8bba450>,
 <keras.layers.core.Dropout at 0x7c9db8b99cd0>,
 <keras.layers.core.Dense at 0x7c9db8bf3dd0>]
In [22]:
len(model.layers)
Out[22]:
389
In [26]:
from keras.utils.vis_utils import plot_model
plot_model(model, to_file='layers_plot.png', show_shapes=True, show_layer_names=True)
Out[26]:
In [ ]: